mirror of
https://github.com/android-actions/setup-android
synced 2024-11-26 06:58:26 -08:00
3404b45d4c
Will be easier to debug production errors later.
8366 lines
No EOL
253 KiB
JavaScript
module.exports =
|
|
/******/ (() => { // webpackBootstrap
|
|
/******/ var __webpack_modules__ = ({
|
|
|
|
/***/ 7351:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript emit helper: adapts a CommonJS export object for `import * as`
// usage by copying its own properties onto a namespace object and exposing
// the original module as `default`. Real ES modules pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    // Already an ES module namespace: use as-is.
    if (mod && mod.__esModule) return mod;
    var result = {};
    // Copy own enumerable properties only (inherited ones are skipped).
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
/**
|
|
* Commands
|
|
*
|
|
* Command Format:
|
|
* ::name key=value,key=value::message
|
|
*
|
|
* Examples:
|
|
* ::warning::This is the message
|
|
* ::set-env name=MY_VAR::some value
|
|
*/
|
|
// Render a workflow command and emit it on stdout, terminated by the
// platform end-of-line sequence, so the Actions runner can interpret it.
function issueCommand(command, properties, message) {
    const rendered = new Command(command, properties, message).toString();
    process.stdout.write(`${rendered}${os.EOL}`);
}
|
|
exports.issueCommand = issueCommand;
|
|
// Convenience wrapper: emit a command that carries no properties.
function issue(name, message = '') {
    const noProperties = {};
    issueCommand(name, noProperties, message);
}
|
|
exports.issue = issue;
|
|
const CMD_STRING = '::';
// Internal representation of a single workflow command of the form
//   ::name key=value,key=value::message
class Command {
    constructor(command, properties, message) {
        // Fall back to a placeholder so malformed calls remain visible in logs.
        this.command = command || 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    toString() {
        const segments = [CMD_STRING + this.command];
        if (this.properties && Object.keys(this.properties).length > 0) {
            const pairs = [];
            // for..in + hasOwnProperty deliberately mirrors the original
            // enumeration semantics (own enumerable string keys only).
            for (const key in this.properties) {
                if (!this.properties.hasOwnProperty(key)) {
                    continue;
                }
                const val = this.properties[key];
                // Falsy property values are dropped entirely.
                if (val) {
                    pairs.push(`${key}=${escapeProperty(val)}`);
                }
            }
            // A space is emitted whenever the properties object has keys,
            // even if every value was filtered out — same as the original.
            segments.push(' ' + pairs.join(','));
        }
        segments.push(`${CMD_STRING}${escapeData(this.message)}`);
        return segments.join('');
    }
}
|
|
// Percent-encode characters that would terminate or corrupt the message
// portion of a workflow command (%, CR, LF).
function escapeData(s) {
    const value = utils_1.toCommandValue(s);
    return value
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
|
|
// Percent-encode characters that are significant inside the property list of
// a workflow command: in addition to the data escapes, ':' and ',' act as
// structural separators and must be encoded too.
function escapeProperty(s) {
    const value = utils_1.toCommandValue(s);
    return value
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
|
|
//# sourceMappingURL=command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2186:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript emit helper: drives a generator function as an async operation
// (down-level async/await). Each yielded value is adopted into a promise; the
// returned promise settles with the generator's final value or first error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript emit helper: adapts a CommonJS export object for `import * as`
// usage by copying its own properties onto a namespace object and exposing
// the original module as `default`. Real ES modules pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    // Already an ES module namespace: use as-is.
    if (mod && mod.__esModule) return mod;
    var result = {};
    // Copy own enumerable properties only (inherited ones are skipped).
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const command_1 = __nccwpck_require__(7351);
|
|
const file_command_1 = __nccwpck_require__(717);
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const path = __importStar(__nccwpck_require__(5622));
|
|
/**
|
|
* The code to exit an action
|
|
*/
|
|
// Compiled TypeScript enum: builds both the forward mapping
// (ExitCode.Success === 0) and the reverse mapping (ExitCode[0] === "Success").
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
//-----------------------------------------------------------------------
|
|
// Variables
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets env variable for this action and future actions in the job
|
|
* @param name the name of the variable to set
|
|
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    // Make the variable visible to the current action immediately.
    process.env[name] = convertedVal;
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        // File-based command: append a heredoc-style block to GITHUB_ENV.
        const delimiter = '_GitHubActionsFileCommandDelimeter_';
        // Security: a name or value containing the heredoc delimiter could
        // terminate the block early and smuggle additional variables into
        // GITHUB_ENV, so reject such input outright (mirrors the upstream
        // actions/toolkit hardening for this command format).
        if (`${name}`.includes(delimiter)) {
            throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
        }
        if (convertedVal.includes(delimiter)) {
            throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
        }
        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
        file_command_1.issueCommand('ENV', commandValue);
    }
    else {
        // Legacy stdout command for runners without GITHUB_ENV support.
        command_1.issueCommand('set-env', { name }, convertedVal);
    }
}
|
|
exports.exportVariable = exportVariable;
|
|
/**
|
|
* Registers a secret which will get masked from logs
|
|
* @param secret value of the secret
|
|
*/
|
|
// Masking is performed runner-side: every future occurrence of the value in
// the log is replaced once the add-mask command is seen.
function setSecret(secret) {
    const noProperties = {};
    command_1.issueCommand('add-mask', noProperties, secret);
}
|
|
exports.setSecret = setSecret;
|
|
/**
|
|
* Prepends inputPath to the PATH (for this action and future actions)
|
|
* @param inputPath
|
|
*/
|
|
// Prepend a directory to PATH for this action and all subsequent steps.
// Prefers the newer GITHUB_PATH file command; falls back to the legacy
// add-path stdout command on older runners.
function addPath(inputPath) {
    const fileCommandPath = process.env['GITHUB_PATH'] || '';
    if (fileCommandPath) {
        file_command_1.issueCommand('PATH', inputPath);
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
    // Also update the current process so the change is effective immediately.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
|
|
exports.addPath = addPath;
|
|
/**
|
|
* Gets the value of an input. The value is also trimmed.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string
|
|
*/
|
|
// Inputs arrive as INPUT_* environment variables with spaces replaced by
// underscores and the name upper-cased. The returned value is trimmed.
function getInput(name, options) {
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const rawValue = process.env[envKey] || '';
    if (!rawValue && options && options.required) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return rawValue.trim();
}
|
|
exports.getInput = getInput;
|
|
/**
|
|
* Sets the value of an output.
|
|
*
|
|
* @param name name of the output to set
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
// Publish a step output via the set-output workflow command.
function setOutput(name, value) {
    const properties = { name };
    command_1.issueCommand('set-output', properties, value);
}
|
|
exports.setOutput = setOutput;
|
|
/**
|
|
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
|
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
|
*
|
|
*/
|
|
// Toggle runner-side echoing of workflow commands for the rest of the step.
function setCommandEcho(enabled) {
    const mode = enabled ? 'on' : 'off';
    command_1.issue('echo', mode);
}
|
|
exports.setCommandEcho = setCommandEcho;
|
|
//-----------------------------------------------------------------------
|
|
// Results
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets the action status to failed.
|
|
* When the action exits it will be with an exit code of 1
|
|
* @param message add error issue message
|
|
*/
|
|
// Mark the step as failed and surface the message as an error annotation.
// The exit code is set first so the failure sticks even if logging throws.
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
|
|
exports.setFailed = setFailed;
|
|
//-----------------------------------------------------------------------
|
|
// Logging Commands
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Gets whether Actions Step Debug is on or not
|
|
*/
|
|
// Step-debug logging is active when the runner exports RUNNER_DEBUG=1.
function isDebug() {
    const debugFlag = process.env['RUNNER_DEBUG'];
    return debugFlag === '1';
}
|
|
exports.isDebug = isDebug;
|
|
/**
|
|
* Writes debug message to user log
|
|
* @param message debug message
|
|
*/
|
|
// Emit a debug-level log line (only shown when step debugging is enabled).
function debug(message) {
    const noProperties = {};
    command_1.issueCommand('debug', noProperties, message);
}
|
|
exports.debug = debug;
|
|
/**
|
|
* Adds an error issue
|
|
* @param message error issue message. Errors will be converted to string via toString()
|
|
*/
|
|
// Emit an error annotation; Error instances are flattened via toString().
function error(message) {
    const text = message instanceof Error ? message.toString() : message;
    command_1.issue('error', text);
}
|
|
exports.error = error;
|
|
/**
|
|
* Adds an warning issue
|
|
* @param message warning issue message. Errors will be converted to string via toString()
|
|
*/
|
|
// Emit a warning annotation; Error instances are flattened via toString().
function warning(message) {
    const text = message instanceof Error ? message.toString() : message;
    command_1.issue('warning', text);
}
|
|
exports.warning = warning;
|
|
/**
|
|
* Writes info to log with console.log.
|
|
* @param message info message
|
|
*/
|
|
// Plain informational log line written straight to stdout.
function info(message) {
    const line = message + os.EOL;
    process.stdout.write(line);
}
|
|
exports.info = info;
|
|
/**
|
|
* Begin an output group.
|
|
*
|
|
* Output until the next `groupEnd` will be foldable in this group
|
|
*
|
|
* @param name The name of the output group
|
|
*/
|
|
// Open a foldable log group; everything until endGroup() is collapsed
// under this name in the Actions log viewer.
function startGroup(name) {
    command_1.issue('group', name);
}
|
|
exports.startGroup = startGroup;
|
|
/**
|
|
* End an output group.
|
|
*/
|
|
// Close the most recently opened foldable log group.
function endGroup() {
    command_1.issue('endgroup');
}
|
|
exports.endGroup = endGroup;
|
|
/**
|
|
* Wrap an asynchronous function call in a group.
|
|
*
|
|
* Returns the same type as the function itself.
|
|
*
|
|
* @param name The name of the group
|
|
* @param fn The function to wrap in the group
|
|
*/
|
|
function group(name, fn) {
    // Down-leveled async function (see __awaiter above).
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn rejects/throws.
            endGroup();
        }
        return result;
    });
}
|
|
exports.group = group;
|
|
//-----------------------------------------------------------------------
|
|
// Wrapper action state
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
|
*
|
|
* @param name name of the state to store
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
// Persist a value for this action's post-job hook via the save-state command.
function saveState(name, value) {
    const properties = { name };
    command_1.issueCommand('save-state', properties, value);
}
|
|
exports.saveState = saveState;
|
|
/**
|
|
* Gets the value of an state set by this action's main execution.
|
|
*
|
|
* @param name name of the state to get
|
|
* @returns string
|
|
*/
|
|
// Saved state is surfaced back to the post-job hook as STATE_* env variables.
function getState(name) {
    const stored = process.env[`STATE_${name}`];
    return stored || '';
}
|
|
exports.getState = getState;
|
|
//# sourceMappingURL=core.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 717:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// For internal use, subject to change.
|
|
// TypeScript emit helper: adapts a CommonJS export object for `import * as`
// usage by copying its own properties onto a namespace object and exposing
// the original module as `default`. Real ES modules pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    // Already an ES module namespace: use as-is.
    if (mod && mod.__esModule) return mod;
    var result = {};
    // Copy own enumerable properties only (inherited ones are skipped).
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
// We use any as a valid input type
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
const fs = __importStar(__nccwpck_require__(5747));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
// Append a file-based workflow command (e.g. ENV, PATH) to the file named by
// the corresponding GITHUB_<command> environment variable.
function issueCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    const payload = `${utils_1.toCommandValue(message)}${os.EOL}`;
    fs.appendFileSync(filePath, payload, {
        encoding: 'utf8'
    });
}
|
|
exports.issueCommand = issueCommand;
|
|
//# sourceMappingURL=file-command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5278:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
// We use any as a valid input type
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
/**
|
|
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
|
* @param input input to sanitize into a string
|
|
*/
|
|
// Normalize an arbitrary value into the string form used by workflow
// commands: null/undefined become '', strings pass through, everything else
// is JSON-encoded.
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    // Boxed String objects are treated the same as primitives.
    const isString = typeof input === 'string' || input instanceof String;
    return isString ? input : JSON.stringify(input);
}
|
|
exports.toCommandValue = toCommandValue;
|
|
//# sourceMappingURL=utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1514:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript emit helper: drives a generator function as an async operation
// (down-level async/await). Each yielded value is adopted into a promise; the
// returned promise settles with the generator's final value or first error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript emit helper: adapts a CommonJS export object for `import * as`
// usage by copying its own properties onto a namespace object and exposing
// the original module as `default`. Real ES modules pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    // Already an ES module namespace: use as-is.
    if (mod && mod.__esModule) return mod;
    var result = {};
    // Copy own enumerable properties only (inherited ones are skipped).
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const tr = __importStar(__nccwpck_require__(8159));
|
|
/**
|
|
* Exec a command.
|
|
* Output will be streamed to the live console.
|
|
* Returns promise with return code
|
|
*
|
|
* @param commandLine command to execute (can include additional args). Must be correctly escaped.
|
|
* @param args optional arguments for tool. Escaping is handled by the lib.
|
|
* @param options optional exec options. See ExecOptions
|
|
* @returns Promise<number> exit code
|
|
*/
|
|
function exec(commandLine, args, options) {
    // Down-leveled async function (see __awaiter above).
    return __awaiter(this, void 0, void 0, function* () {
        // commandLine may itself embed arguments; split it first.
        const commandArgs = tr.argStringToArray(commandLine);
        if (commandArgs.length === 0) {
            throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
        }
        // Path to tool to execute should be first arg
        const toolPath = commandArgs[0];
        // Arguments parsed out of commandLine come before any explicit args.
        args = commandArgs.slice(1).concat(args || []);
        const runner = new tr.ToolRunner(toolPath, args, options);
        return runner.exec();
    });
}
|
|
exports.exec = exec;
|
|
//# sourceMappingURL=exec.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8159:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript emit helper: drives a generator function as an async operation
// (down-level async/await). Each yielded value is adopted into a promise; the
// returned promise settles with the generator's final value or first error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
// TypeScript emit helper: adapts a CommonJS export object for `import * as`
// usage by copying its own properties onto a namespace object and exposing
// the original module as `default`. Real ES modules pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    // Already an ES module namespace: use as-is.
    if (mod && mod.__esModule) return mod;
    var result = {};
    // Copy own enumerable properties only (inherited ones are skipped).
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const events = __importStar(__nccwpck_require__(8614));
|
|
const child = __importStar(__nccwpck_require__(3129));
|
|
const path = __importStar(__nccwpck_require__(5622));
|
|
const io = __importStar(__nccwpck_require__(7436));
|
|
const ioUtil = __importStar(__nccwpck_require__(1962));
|
|
/* eslint-disable @typescript-eslint/unbound-method */
|
|
const IS_WINDOWS = process.platform === 'win32';
|
|
/*
|
|
* Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
|
|
*/
|
|
class ToolRunner extends events.EventEmitter {
    constructor(toolPath, args, options) {
        super();
        if (!toolPath) {
            throw new Error("Parameter 'toolPath' cannot be null or empty.");
        }
        this.toolPath = toolPath;
        this.args = args || [];
        this.options = options || {};
    }
    // Forwards a message to the caller-supplied debug listener, if any.
    _debug(message) {
        if (this.options.listeners && this.options.listeners.debug) {
            this.options.listeners.debug(message);
        }
    }
    // Renders the human-readable command line echoed to the output stream
    // before execution; quoting mirrors what _getSpawnArgs will produce.
    _getCommandString(options, noPrefix) {
        const toolPath = this._getSpawnFileName();
        const args = this._getSpawnArgs(options);
        let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
        if (IS_WINDOWS) {
            // Windows + cmd file
            if (this._isCmdFile()) {
                cmd += toolPath;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows + verbatim
            else if (options.windowsVerbatimArguments) {
                cmd += `"${toolPath}"`;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows (regular)
            else {
                cmd += this._windowsQuoteCmdArg(toolPath);
                for (const a of args) {
                    cmd += ` ${this._windowsQuoteCmdArg(a)}`;
                }
            }
        }
        else {
            // OSX/Linux - this can likely be improved with some form of quoting.
            // creating processes on Unix is fundamentally different than Windows.
            // on Unix, execvp() takes an arg array.
            cmd += toolPath;
            for (const a of args) {
                cmd += ` ${a}`;
            }
        }
        return cmd;
    }
    // Splits buffered process output into complete lines (delimited by os.EOL)
    // and feeds each one to onLine.
    // NOTE(review): strBuffer is a string parameter, so the reassignment at
    // the end never propagates back to the caller — a trailing partial line is
    // NOT carried over between 'data' events. This matches upstream
    // @actions/exec of this vintage (fixed in later toolkit versions by
    // returning the remainder); confirm before relying on stdline/errline for
    // an unterminated final line.
    _processLineBuffer(data, strBuffer, onLine) {
        try {
            let s = strBuffer + data.toString();
            let n = s.indexOf(os.EOL);
            while (n > -1) {
                const line = s.substring(0, n);
                onLine(line);
                // the rest of the string ...
                s = s.substring(n + os.EOL.length);
                n = s.indexOf(os.EOL);
            }
            strBuffer = s;
        }
        catch (err) {
            // streaming lines to console is best effort. Don't fail a build.
            this._debug(`error processing line. Failed with error ${err}`);
        }
    }
    // Executable handed to spawn(); .cmd/.bat scripts run through the shell
    // named by COMSPEC (default cmd.exe).
    _getSpawnFileName() {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                return process.env['COMSPEC'] || 'cmd.exe';
            }
        }
        return this.toolPath;
    }
    // Argument vector for spawn(). For cmd files on Windows the entire command
    // line is collapsed into one `/D /S /C "..."` argument.
    _getSpawnArgs(options) {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
                for (const a of this.args) {
                    argline += ' ';
                    argline += options.windowsVerbatimArguments
                        ? a
                        : this._windowsQuoteCmdArg(a);
                }
                argline += '"';
                return [argline];
            }
        }
        return this.args;
    }
    _endsWith(str, end) {
        return str.endsWith(end);
    }
    // True when the tool is a Windows .cmd/.bat script (case-insensitive).
    _isCmdFile() {
        const upperToolPath = this.toolPath.toUpperCase();
        return (this._endsWith(upperToolPath, '.CMD') ||
            this._endsWith(upperToolPath, '.BAT'));
    }
    _windowsQuoteCmdArg(arg) {
        // for .exe, apply the normal quoting rules that libuv applies
        if (!this._isCmdFile()) {
            return this._uvQuoteCmdArg(arg);
        }
        // otherwise apply quoting rules specific to the cmd.exe command line
        // parser (the libuv rules are generic and not designed for cmd.exe).
        // for a detailed description of the cmd.exe parser, refer to
        // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
        // need quotes for empty arg
        if (!arg) {
            return '""';
        }
        // determine whether the arg needs to be quoted
        const cmdSpecialChars = [
            ' ',
            '\t',
            '&',
            '(',
            ')',
            '[',
            ']',
            '{',
            '}',
            '^',
            '=',
            ';',
            '!',
            "'",
            '+',
            ',',
            '`',
            '~',
            '|',
            '<',
            '>',
            '"'
        ];
        let needsQuotes = false;
        for (const char of arg) {
            if (cmdSpecialChars.some(x => x === char)) {
                needsQuotes = true;
                break;
            }
        }
        // short-circuit if quotes not needed
        if (!needsQuotes) {
            return arg;
        }
        // quoting rules, similar to libuv's:
        //   1) wrap the string in quotes
        //   2) double-up quotes: " => "" (unlike libuv's \" replacement, this
        //      survives the cmd.exe parser and is also parsed correctly by
        //      downstream .NET console apps)
        //   3) double-up backslashes that precede a quote or the end of the
        //      string, e.g. hello\"world => "hello\\""world",
        //      hello world\ => "hello world\\"
        // known weakness: % is not (and cannot be) escaped on the command line
        // itself; the saving grace is that cmd.exe leaves %var% as-is when var
        // is undefined.
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\'; // double the slash
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '"'; // double the quote
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    _uvQuoteCmdArg(arg) {
        // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
        // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
        // is used.
        // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
        // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
        // pasting copyright notice from Node within this function:
        //
        //      Copyright Joyent, Inc. and other Node contributors. All rights reserved.
        //
        //      Permission is hereby granted, free of charge, to any person obtaining a copy
        //      of this software and associated documentation files (the "Software"), to
        //      deal in the Software without restriction, including without limitation the
        //      rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
        //      sell copies of the Software, and to permit persons to whom the Software is
        //      furnished to do so, subject to the following conditions:
        //
        //      The above copyright notice and this permission notice shall be included in
        //      all copies or substantial portions of the Software.
        //
        //      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
        //      IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
        //      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
        //      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
        //      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
        //      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
        //      IN THE SOFTWARE.
        if (!arg) {
            // Need double quotation for empty argument
            return '""';
        }
        if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
            // No quotation needed
            return arg;
        }
        if (!arg.includes('"') && !arg.includes('\\')) {
            // No embedded double quotes or backslashes, so I can just wrap
            // quote marks around the whole thing.
            return `"${arg}"`;
        }
        // Expected input/output:
        //   input : hello"world    output: "hello\"world"
        //   input : hello""world   output: "hello\"\"world"
        //   input : hello\world    output: hello\world
        //   input : hello\\world   output: hello\\world
        //   input : hello\"world   output: "hello\\\"world"
        //   input : hello\\"world  output: "hello\\\\\"world"
        //   input : hello world\   output: "hello world\\"
        // (the libuv comment reads "hello world\" for the last case, but that
        // appears to be wrong; it should be "hello world\\")
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\';
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '\\';
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    // Produces a fully-populated copy of the exec options with defaults applied.
    _cloneExecOptions(options) {
        options = options || {};
        const result = {
            cwd: options.cwd || process.cwd(),
            env: options.env || process.env,
            silent: options.silent || false,
            windowsVerbatimArguments: options.windowsVerbatimArguments || false,
            failOnStdErr: options.failOnStdErr || false,
            ignoreReturnCode: options.ignoreReturnCode || false,
            delay: options.delay || 10000
        };
        result.outStream = options.outStream || process.stdout;
        result.errStream = options.errStream || process.stderr;
        return result;
    }
    // Translates exec options into child_process.spawn options.
    _getSpawnOptions(options, toolPath) {
        options = options || {};
        const result = {};
        result.cwd = options.cwd;
        result.env = options.env;
        result['windowsVerbatimArguments'] =
            options.windowsVerbatimArguments || this._isCmdFile();
        if (options.windowsVerbatimArguments) {
            result.argv0 = `"${toolPath}"`;
        }
        return result;
    }
    /**
     * Exec a tool.
     * Output will be streamed to the live console.
     * Returns promise with return code
     *
     * @param     tool     path to tool to exec
     * @param     options  optional exec options. See ExecOptions
     * @returns   number
     */
    exec() {
        return __awaiter(this, void 0, void 0, function* () {
            // root the tool path if it is unrooted and contains relative pathing
            if (!ioUtil.isRooted(this.toolPath) &&
                (this.toolPath.includes('/') ||
                    (IS_WINDOWS && this.toolPath.includes('\\')))) {
                // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
                this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
            }
            // if the tool is only a file name, then resolve it from the PATH
            // otherwise verify it exists (add extension on Windows if necessary)
            this.toolPath = yield io.which(this.toolPath, true);
            return new Promise((resolve, reject) => {
                this._debug(`exec tool: ${this.toolPath}`);
                this._debug('arguments:');
                for (const arg of this.args) {
                    this._debug(`   ${arg}`);
                }
                const optionsNonNull = this._cloneExecOptions(this.options);
                if (!optionsNonNull.silent && optionsNonNull.outStream) {
                    optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
                }
                const state = new ExecState(optionsNonNull, this.toolPath);
                state.on('debug', (message) => {
                    this._debug(message);
                });
                const fileName = this._getSpawnFileName();
                const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
                // NOTE(review): these buffers stay '' forever (strings are passed
                // by value into _processLineBuffer), so the stdline/errline
                // flushing in the 'done' handler below can never fire — see the
                // note on _processLineBuffer.
                const stdbuffer = '';
                if (cp.stdout) {
                    cp.stdout.on('data', (data) => {
                        if (this.options.listeners && this.options.listeners.stdout) {
                            this.options.listeners.stdout(data);
                        }
                        if (!optionsNonNull.silent && optionsNonNull.outStream) {
                            optionsNonNull.outStream.write(data);
                        }
                        this._processLineBuffer(data, stdbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.stdline) {
                                this.options.listeners.stdline(line);
                            }
                        });
                    });
                }
                const errbuffer = '';
                if (cp.stderr) {
                    cp.stderr.on('data', (data) => {
                        state.processStderr = true;
                        if (this.options.listeners && this.options.listeners.stderr) {
                            this.options.listeners.stderr(data);
                        }
                        if (!optionsNonNull.silent &&
                            optionsNonNull.errStream &&
                            optionsNonNull.outStream) {
                            // stderr is routed to errStream only when failOnStdErr
                            // is set; otherwise it is merged into outStream.
                            const s = optionsNonNull.failOnStdErr
                                ? optionsNonNull.errStream
                                : optionsNonNull.outStream;
                            s.write(data);
                        }
                        this._processLineBuffer(data, errbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.errline) {
                                this.options.listeners.errline(line);
                            }
                        });
                    });
                }
                cp.on('error', (err) => {
                    state.processError = err.message;
                    state.processExited = true;
                    state.processClosed = true;
                    state.CheckComplete();
                });
                cp.on('exit', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                cp.on('close', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    state.processClosed = true;
                    this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                state.on('done', (error, exitCode) => {
                    if (stdbuffer.length > 0) {
                        this.emit('stdline', stdbuffer);
                    }
                    if (errbuffer.length > 0) {
                        this.emit('errline', errbuffer);
                    }
                    cp.removeAllListeners();
                    if (error) {
                        reject(error);
                    }
                    else {
                        resolve(exitCode);
                    }
                });
                if (this.options.input) {
                    if (!cp.stdin) {
                        throw new Error('child process missing stdin');
                    }
                    cp.stdin.end(this.options.input);
                }
            });
        });
    }
}
|
|
exports.ToolRunner = ToolRunner;
|
|
/**
 * Convert an arg string to an array of args. Handles escaping.
 *
 * Quoting rules: unquoted spaces separate args; double quotes group a span
 * (the quotes themselves are dropped); inside quotes, backslash escapes only
 * a double quote — any other backslash is kept literally.
 *
 * @param argString string of arguments
 * @returns string[] array of arguments
 */
function argStringToArray(argString) {
    const args = [];
    let inQuotes = false;
    let escaped = false;
    let current = '';
    // Append one character to the arg under construction, first expanding a
    // pending backslash unless it escaped a double quote (the only character
    // we treat as escapable).
    const appendChar = (ch) => {
        if (escaped && ch !== '"') {
            current += '\\';
        }
        current += ch;
        escaped = false;
    };
    for (let idx = 0; idx < argString.length; idx++) {
        const ch = argString.charAt(idx);
        if (ch === '"') {
            // An escaped quote is literal; an unescaped one toggles quoting.
            if (escaped) {
                appendChar(ch);
            }
            else {
                inQuotes = !inQuotes;
            }
            continue;
        }
        if (ch === '\\' && escaped) {
            appendChar(ch);
            continue;
        }
        if (ch === '\\' && inQuotes) {
            escaped = true;
            continue;
        }
        if (ch === ' ' && !inQuotes) {
            // Unquoted whitespace terminates the current argument.
            if (current.length > 0) {
                args.push(current);
                current = '';
            }
            continue;
        }
        appendChar(ch);
    }
    if (current.length > 0) {
        args.push(current.trim());
    }
    return args;
}
|
|
exports.argStringToArray = argStringToArray;
|
|
class ExecState extends events.EventEmitter {
    /**
     * Tracks the lifecycle of a spawned tool: exit code, stderr activity,
     * and stdio closure. Emits 'done' exactly once when the run has settled,
     * and 'debug' for diagnostic messages.
     * @param options exec options (delay, ignoreReturnCode, failOnStdErr)
     * @param toolPath path of the tool being executed (used in error text)
     */
    constructor(options, toolPath) {
        super();
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.processClosed = false; // process exited AND stdio streams closed
        this.processError = '';
        this.processExitCode = 0;
        this.processExited = false; // exit event observed
        this.processStderr = false; // something was written to stderr
        this.delay = 10000; // grace period (ms) for stdio to close: 10 seconds
        this.done = false;
        this.timeout = null;
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    /** Re-evaluates completion; safe to call from any process event handler. */
    CheckComplete() {
        if (this.done) {
            return;
        }
        if (this.processClosed) {
            this._setResult();
        }
        else if (this.processExited) {
            // Exited but stdio still open: give the streams `delay` ms to close
            // before forcing a result (a child may have inherited them).
            this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    /** Forwards a diagnostic message to any 'debug' listeners. */
    _debug(message) {
        this.emit('debug', message);
    }
    /** Builds the final error (if any) and emits 'done' exactly once. */
    _setResult() {
        let error;
        if (this.processExited) {
            if (this.processError) {
                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        this.done = true;
        this.emit('done', error, this.processExitCode);
    }
    /** setTimeout callback: stdio never closed after exit; settle anyway. */
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            state._debug(`The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`);
        }
        state._setResult();
    }
}
|
|
//# sourceMappingURL=toolrunner.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9925:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const http = __nccwpck_require__(8605);
|
|
const https = __nccwpck_require__(7211);
|
|
const pm = __nccwpck_require__(6443);
|
|
let tunnel;
|
|
// HTTP status codes used by the client. TypeScript-style reverse-mapped enum:
// HttpCodes.OK === 200 and HttpCodes[200] === "OK".
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// Well-known header names (lower-cased, as used for header merging below).
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
// Well-known MIME types used as defaults for the *Json helper methods.
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
|
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy href, or '' when no proxy applies
 */
function getProxyUrl(serverUrl) {
    const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
|
|
exports.getProxyUrl = getProxyUrl;
|
|
// Status codes that trigger automatic redirect following in request().
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Transient server errors that request() will retry (when retries are enabled).
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only read-style verbs are retried, since writes may not be idempotent.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Backoff between retries is ExponentialBackoffTimeSlice * 2^n ms,
// with n capped at ExponentialBackoffCeiling (see _performExponentialBackoff).
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
|
|
/**
 * Error rejected by _processResponse for non-success HTTP responses.
 * Carries the HTTP status code; `result` (the parsed body, if any) is
 * attached by the caller before rejecting.
 */
class HttpClientError extends Error {
    /**
     * @param message error message (server-provided message, body text, or a generic fallback)
     * @param statusCode HTTP status code of the failed response
     */
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
        // Repair the prototype chain so `instanceof HttpClientError` works
        // even when Error subclassing is transpiled down-level.
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
|
|
exports.HttpClientError = HttpClientError;
|
|
/**
 * Wraps an incoming response message and exposes its body as a string.
 */
class HttpClientResponse {
    /**
     * @param message the underlying incoming message (exposes statusCode,
     *        headers, and emits 'data'/'end' for the body stream)
     */
    constructor(message) {
        this.message = message;
    }
    /**
     * Accumulates the 'data' chunks of the message and resolves the
     * concatenated body as a string once 'end' fires.
     *
     * Fix: the executor was previously declared `async` with an unused
     * `reject` parameter (the no-async-promise-executor anti-pattern — a
     * throw inside an async executor is swallowed instead of rejecting).
     * The executor does no awaiting, so a plain function is used.
     *
     * @returns Promise resolving to the full response body text
     */
    readBody() {
        return new Promise(resolve => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
|
|
exports.HttpClientResponse = HttpClientResponse;
|
|
/**
 * Determines whether a request URL uses the https protocol.
 * @param requestUrl url to inspect (must be an absolute URL)
 * @returns true when the protocol is https
 */
function isHttps(requestUrl) {
    const parsedUrl = new URL(requestUrl);
    return parsedUrl.protocol === 'https:';
}
|
|
exports.isHttps = isHttps;
|
|
/**
 * HTTP/HTTPS client with optional proxy tunneling, redirect following,
 * retries for transient server errors, and pluggable authentication
 * handlers. JSON convenience helpers parse bodies via _processResponse.
 */
class HttpClient {
    /**
     * @param userAgent value sent as the user-agent header (optional)
     * @param handlers auth handlers consulted on 401 responses (optional)
     * @param requestOptions client-wide options: ignoreSslError, socketTimeout,
     *        allowRedirects, allowRedirectDowngrade, maxRedirects, keepAlive,
     *        allowRetries, maxRetries, headers, maxSockets, deserializeDates
     */
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            // `!= null` deliberately accepts explicit `false`/`0` overrides
            // while ignoring absent options.
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // --- Verb-specific convenience wrappers around request() ---
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    // Sends a readable stream as the request body.
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // POSTs `obj` as pretty-printed JSON and parses the JSON response.
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // PUTs `obj` as pretty-printed JSON and parses the JSON response.
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // PATCHes `obj` as pretty-printed JSON and parses the JSON response.
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     *
     * Handles (in order): 401 auth-handler dispatch, redirect following
     * (up to _maxRedirects), and retrying transient 502/503/504 responses.
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = new URL(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = new URL(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the failed response (frees the socket) before backing off.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     * @returns Promise of HttpClientResponse; rejects on transport error/timeout
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data string body, readable stream, or null
     * @param onResult invoked once with (err, response)
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        // Guard so onResult fires at most once (error + timeout can both occur).
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            // Stream body: end the request when the stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        let parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    // Builds the { parsedUrl, httpModule, options } descriptor consumed by
    // requestRawWithCallback, merging headers and letting handlers participate.
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    // Merges per-request headers over client-default headers; all keys
    // lower-cased so later values win case-insensitively.
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    // Resolution order: per-call header, then client default, then _default.
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    // Selects (and, with keepAlive, caches) the agent for a URL: a tunnel
    // agent when a proxy applies, a keep-alive agent, or the global agent.
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                // lazy-load the tunnel module only when a proxy is in play
                tunnel = __nccwpck_require__(4294);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            // pick the tunnel flavor from (target scheme) x (proxy scheme)
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    // Sleeps ExponentialBackoffTimeSlice * 2^retryNumber ms (exponent capped).
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    // JSON.parse reviver: converts parseable date strings back into Date
    // objects (used when requestOptions.deserializeDates is set).
    static dateTimeDeserializer(key, value) {
        if (typeof value === 'string') {
            let a = new Date(value);
            if (!isNaN(a.valueOf())) {
                return a;
            }
        }
        return value;
    }
    // Reads and JSON-parses a response body into { statusCode, result, headers };
    // rejects with HttpClientError for status > 299.
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            // NOTE(review): execution continues past this resolve; later
            // resolve/reject calls are no-ops on the already-settled promise.
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new HttpClientError(msg, statusCode);
                err.result = response.result;
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
|
|
exports.HttpClient = HttpClient;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6443:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
/**
 * Resolves the proxy URL for a request from the environment.
 * Honors https_proxy/HTTPS_PROXY for https targets and
 * http_proxy/HTTP_PROXY otherwise; returns undefined when the host is
 * bypassed via no_proxy/NO_PROXY or when no proxy variable is set.
 * @param reqUrl the URL object of the outgoing request
 */
function getProxyUrl(reqUrl) {
    const usingSsl = reqUrl.protocol === 'https:';
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    if (proxyVar) {
        proxyUrl = new URL(proxyVar);
    }
    return proxyUrl;
}
|
|
exports.getProxyUrl = getProxyUrl;
|
|
function checkBypass(reqUrl) {
|
|
if (!reqUrl.hostname) {
|
|
return false;
|
|
}
|
|
let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
|
if (!noProxy) {
|
|
return false;
|
|
}
|
|
// Determine the request port
|
|
let reqPort;
|
|
if (reqUrl.port) {
|
|
reqPort = Number(reqUrl.port);
|
|
}
|
|
else if (reqUrl.protocol === 'http:') {
|
|
reqPort = 80;
|
|
}
|
|
else if (reqUrl.protocol === 'https:') {
|
|
reqPort = 443;
|
|
}
|
|
// Format the request hostname and hostname with port
|
|
let upperReqHosts = [reqUrl.hostname.toUpperCase()];
|
|
if (typeof reqPort === 'number') {
|
|
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
|
|
}
|
|
// Compare request host against noproxy
|
|
for (let upperNoProxyItem of noProxy
|
|
.split(',')
|
|
.map(x => x.trim().toUpperCase())
|
|
.filter(x => x)) {
|
|
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
|
return true;
|
|
}
|
|
}
|
|
return false;
|
|
}
|
|
exports.checkBypass = checkBypass;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1962:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __awaiter helper: drives a generator compiled from an
// async function, adopting each yielded value into a Promise and resolving
// with the generator's final return value (rejecting on any throw).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
var _a;
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const assert_1 = __nccwpck_require__(2357);
|
|
const fs = __nccwpck_require__(5747);
|
|
const path = __nccwpck_require__(5622);
|
|
_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
|
|
exports.IS_WINDOWS = process.platform === 'win32';
|
|
/**
 * Checks whether a path exists on disk.
 * @param fsPath path to check
 * @returns Promise<boolean>: true if stat succeeds, false on ENOENT;
 *          any other stat error is rethrown
 */
async function exists(fsPath) {
    try {
        await exports.stat(fsPath);
        return true;
    }
    catch (err) {
        if (err.code === 'ENOENT') {
            return false;
        }
        throw err;
    }
}
|
|
exports.exists = exists;
|
|
/**
 * Determines whether a path is a directory.
 * @param fsPath path to check
 * @param useStat when true, follow symlinks (stat); otherwise lstat
 * @returns Promise<boolean>
 */
async function isDirectory(fsPath, useStat = false) {
    const stats = useStat ? await exports.stat(fsPath) : await exports.lstat(fsPath);
    return stats.isDirectory();
}
|
|
exports.isDirectory = isDirectory;
|
|
/**
 * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
 * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
 * @throws when `p` normalizes to an empty string
 */
function isRooted(p) {
    p = normalizeSeparators(p);
    if (!p) {
        throw new Error('isRooted() parameter "p" cannot be empty');
    }
    if (exports.IS_WINDOWS) {
        // e.g. \ or \hello or \\hello\share ... or C: / C:\hello
        return p.startsWith('\\') || /^[A-Z]:/i.test(p);
    }
    return p.startsWith('/');
}
|
|
exports.isRooted = isRooted;
|
|
/**
 * Recursively create a directory at `fsPath`.
 *
 * This implementation is optimistic, meaning it attempts to create the full
 * path first, and backs up the path stack from there.
 *
 * @param fsPath The path to create
 * @param maxDepth The maximum recursion depth
 * @param depth The current recursion depth
 */
function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(fsPath, 'a path argument must be provided');
        fsPath = path.resolve(fsPath);
        // At the recursion limit, make one last unconditional attempt.
        if (depth >= maxDepth)
            return exports.mkdir(fsPath);
        try {
            yield exports.mkdir(fsPath);
            return;
        }
        catch (err) {
            switch (err.code) {
                case 'ENOENT': {
                    // Parent is missing: create it first, then retry this level.
                    yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
                    yield exports.mkdir(fsPath);
                    return;
                }
                default: {
                    // Any other failure (e.g. the path already exists) is only
                    // acceptable if the path turns out to be a directory.
                    let stats;
                    try {
                        stats = yield exports.stat(fsPath);
                    }
                    catch (err2) {
                        // Path not statable: surface the original mkdir error.
                        throw err;
                    }
                    if (!stats.isDirectory())
                        throw err;
                }
            }
        }
    });
}
|
|
exports.mkdirP = mkdirP;
|
|
/**
 * Best effort attempt to determine whether a file exists and is executable.
 * @param filePath file path to check
 * @param extensions additional file extensions to try
 * @return if file exists and is executable, returns the file path. otherwise empty string.
 */
function tryGetExecutablePath(filePath, extensions) {
    return __awaiter(this, void 0, void 0, function* () {
        let stats = undefined;
        try {
            // test file exists
            stats = yield exports.stat(filePath);
        }
        catch (err) {
            if (err.code !== 'ENOENT') {
                // Non-ENOENT stat failures are logged but treated as "not found".
                // eslint-disable-next-line no-console
                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
            }
        }
        if (stats && stats.isFile()) {
            if (exports.IS_WINDOWS) {
                // on Windows, test for valid extension
                const upperExt = path.extname(filePath).toUpperCase();
                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
                    return filePath;
                }
            }
            else {
                if (isUnixExecutable(stats)) {
                    return filePath;
                }
            }
        }
        // try each extension
        const originalFilePath = filePath;
        for (const extension of extensions) {
            filePath = originalFilePath + extension;
            stats = undefined;
            try {
                stats = yield exports.stat(filePath);
            }
            catch (err) {
                if (err.code !== 'ENOENT') {
                    // eslint-disable-next-line no-console
                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
                }
            }
            if (stats && stats.isFile()) {
                if (exports.IS_WINDOWS) {
                    // preserve the case of the actual file (since an extension was appended)
                    try {
                        const directory = path.dirname(filePath);
                        const upperName = path.basename(filePath).toUpperCase();
                        for (const actualName of yield exports.readdir(directory)) {
                            if (upperName === actualName.toUpperCase()) {
                                filePath = path.join(directory, actualName);
                                break;
                            }
                        }
                    }
                    catch (err) {
                        // Directory listing failed: fall back to the appended-extension name.
                        // eslint-disable-next-line no-console
                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
                    }
                    return filePath;
                }
                else {
                    if (isUnixExecutable(stats)) {
                        return filePath;
                    }
                }
            }
        }
        // No candidate matched.
        return '';
    });
}
|
|
exports.tryGetExecutablePath = tryGetExecutablePath;
|
|
/**
 * Normalizes path separators: on Windows, converts '/' to '\' and collapses
 * repeated backslashes; elsewhere, collapses repeated forward slashes.
 * @param p path to normalize (null/undefined treated as '')
 */
function normalizeSeparators(p) {
    p = p || '';
    if (exports.IS_WINDOWS) {
        // convert slashes, then remove redundant backslashes
        return p.replace(/\//g, '\\').replace(/\\\\+/g, '\\');
    }
    // remove redundant slashes
    return p.replace(/\/\/+/g, '/');
}
|
|
// on Mac/Linux, test the execute bit against the mode bit layout:
//     R   W  X  R  W X R W X
//   256 128 64 32 16 8 4 2 1
// Short-circuit order is deliberate: process.getgid()/getuid() are only
// consulted when the corresponding group/owner execute bit is set.
function isUnixExecutable(stats) {
    if ((stats.mode & 1) > 0) {
        return true; // executable by anyone
    }
    if ((stats.mode & 8) > 0 && stats.gid === process.getgid()) {
        return true; // executable by our group
    }
    // executable by the file owner, if that's us
    return (stats.mode & 64) > 0 && stats.uid === process.getuid();
}
|
|
//# sourceMappingURL=io-util.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7436:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TypeScript-emitted __awaiter helper (duplicated per bundled module):
// drives a generator compiled from an async function, adopting each yielded
// value into a Promise and settling with the generator's outcome.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const childProcess = __nccwpck_require__(3129);
|
|
const path = __nccwpck_require__(5622);
|
|
const util_1 = __nccwpck_require__(1669);
|
|
const ioUtil = __nccwpck_require__(1962);
|
|
const exec = util_1.promisify(childProcess.exec);
|
|
/**
|
|
* Copies a file or folder.
|
|
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
|
|
*
|
|
* @param source source path
|
|
* @param dest destination path
|
|
* @param options optional. See CopyOptions.
|
|
*/
|
|
function cp(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        // force defaults to true, recursive to false (see readCopyOptions).
        const { force, recursive } = readCopyOptions(options);
        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
        // Dest is an existing file, but not forcing — silently do nothing.
        if (destStat && destStat.isFile() && !force) {
            return;
        }
        // If dest is an existing directory, should copy inside.
        const newDest = destStat && destStat.isDirectory()
            ? path.join(dest, path.basename(source))
            : dest;
        if (!(yield ioUtil.exists(source))) {
            throw new Error(`no such file or directory: ${source}`);
        }
        const sourceStat = yield ioUtil.stat(source);
        if (sourceStat.isDirectory()) {
            // Directories are only copied when options.recursive was set.
            if (!recursive) {
                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
            }
            else {
                yield cpDirRecursive(source, newDest, 0, force);
            }
        }
        else {
            if (path.relative(source, newDest) === '') {
                // a file cannot be copied to itself
                throw new Error(`'${newDest}' and '${source}' are the same file`);
            }
            yield copyFile(source, newDest, force);
        }
    });
}
|
|
exports.cp = cp;
|
|
/**
|
|
* Moves a path.
|
|
*
|
|
* @param source source path
|
|
* @param dest destination path
|
|
* @param options optional. See MoveOptions.
|
|
*/
|
|
function mv(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        if (yield ioUtil.exists(dest)) {
            let destExists = true;
            if (yield ioUtil.isDirectory(dest)) {
                // If dest is directory copy src into dest
                dest = path.join(dest, path.basename(source));
                destExists = yield ioUtil.exists(dest);
            }
            if (destExists) {
                // force defaults to true when unset (== null matches undefined too);
                // an explicit force:false makes an existing destination an error.
                if (options.force == null || options.force) {
                    yield rmRF(dest);
                }
                else {
                    throw new Error('Destination already exists');
                }
            }
        }
        // Ensure the parent directory exists, then rename in place.
        yield mkdirP(path.dirname(dest));
        yield ioUtil.rename(source, dest);
    });
}
|
|
exports.mv = mv;
|
|
/**
|
|
* Remove a path recursively with force
|
|
*
|
|
* @param inputPath path to remove
|
|
*/
|
|
function rmRF(inputPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // NOTE(review): inputPath is interpolated into a shell command on both
        // branches below; a path containing quotes/metacharacters from an
        // untrusted source could inject commands — confirm callers pass
        // trusted paths only.
        if (ioUtil.IS_WINDOWS) {
            // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
            // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
            try {
                if (yield ioUtil.isDirectory(inputPath, true)) {
                    yield exec(`rd /s /q "${inputPath}"`);
                }
                else {
                    yield exec(`del /f /a "${inputPath}"`);
                }
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
            // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
            try {
                yield ioUtil.unlink(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
        }
        else {
            let isDir = false;
            try {
                isDir = yield ioUtil.isDirectory(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
                // Path doesn't exist: nothing to remove.
                return;
            }
            if (isDir) {
                yield exec(`rm -rf "${inputPath}"`);
            }
            else {
                yield ioUtil.unlink(inputPath);
            }
        }
    });
}
|
|
exports.rmRF = rmRF;
|
|
/**
|
|
* Make a directory. Creates the full path with folders in between
|
|
* Will throw if it fails
|
|
*
|
|
* @param fsPath path to create
|
|
* @returns Promise<void>
|
|
*/
|
|
function mkdirP(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Recursive creation is implemented in io-util; just forward the path
        // and resolve with undefined like the rest of this module's wrappers.
        return void (yield ioUtil.mkdirP(fsPath));
    });
}
|
|
exports.mkdirP = mkdirP;
|
|
/**
|
|
* Returns path of a tool had the tool actually been invoked. Resolves via paths.
|
|
* If you check and the tool does not exist, it will throw.
|
|
*
|
|
* @param tool name of the tool
|
|
* @param check whether to check if tool exists
|
|
* @returns Promise<string> path to tool
|
|
*/
|
|
function which(tool, check) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // recursive when check=true: resolve once without checking, then throw
        // a platform-appropriate message if nothing was found.
        if (check) {
            const result = yield which(tool, false);
            if (!result) {
                if (ioUtil.IS_WINDOWS) {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
                }
                else {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                }
            }
        }
        try {
            // build the list of extensions to try (Windows PATHEXT, e.g. .EXE;.CMD)
            const extensions = [];
            if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
                for (const extension of process.env.PATHEXT.split(path.delimiter)) {
                    if (extension) {
                        extensions.push(extension);
                    }
                }
            }
            // if it's rooted, return it if exists. otherwise return empty.
            if (ioUtil.isRooted(tool)) {
                const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
                if (filePath) {
                    return filePath;
                }
                return '';
            }
            // if any path separators, return empty
            if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
                return '';
            }
            // build the list of directories
            //
            // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
            // it feels like we should not do this. Checking the current directory seems like more of a use
            // case of a shell, and the which() function exposed by the toolkit should strive for consistency
            // across platforms.
            const directories = [];
            if (process.env.PATH) {
                for (const p of process.env.PATH.split(path.delimiter)) {
                    if (p) {
                        directories.push(p);
                    }
                }
            }
            // return the first match found walking PATH in order
            for (const directory of directories) {
                const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
                if (filePath) {
                    return filePath;
                }
            }
            return '';
        }
        catch (err) {
            // Wraps any lookup failure; note the original stack is not preserved.
            throw new Error(`which failed with message ${err.message}`);
        }
    });
}
|
|
exports.which = which;
|
|
/**
 * Normalizes CopyOptions: `force` defaults to true when null/undefined
 * (any explicit value is passed through untouched), `recursive` is
 * coerced to a strict boolean.
 */
function readCopyOptions(options) {
    return {
        force: options.force == null ? true : options.force,
        recursive: !!options.recursive
    };
}
|
|
// Recursively copies the contents of sourceDir into destDir (created if
// needed), then mirrors the source directory's mode onto the destination.
// Depth is capped at 255 to guard against cycles (e.g. symlink loops).
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
    return __awaiter(this, void 0, void 0, function* () {
        // Ensure there is not a run away recursive copy
        if (currentDepth >= 255)
            return;
        currentDepth++;
        yield mkdirP(destDir);
        const files = yield ioUtil.readdir(sourceDir);
        for (const fileName of files) {
            // Paths are joined with '/' literally; path.join is not used here.
            const srcFile = `${sourceDir}/${fileName}`;
            const destFile = `${destDir}/${fileName}`;
            // lstat so symlinks are seen as links, not their targets.
            const srcFileStat = yield ioUtil.lstat(srcFile);
            if (srcFileStat.isDirectory()) {
                // Recurse
                yield cpDirRecursive(srcFile, destFile, currentDepth, force);
            }
            else {
                yield copyFile(srcFile, destFile, force);
            }
        }
        // Change the mode for the newly created directory
        yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
    });
}
|
|
// Buffered file copy
|
|
// Buffered file copy. Symlinks are re-created at the destination rather
// than dereferenced; regular files are copied only when the destination is
// absent or force is set.
function copyFile(srcFile, destFile, force) {
    return __awaiter(this, void 0, void 0, function* () {
        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
            // unlink/re-link it
            try {
                yield ioUtil.lstat(destFile);
                yield ioUtil.unlink(destFile);
            }
            catch (e) {
                // Try to override file permission
                if (e.code === 'EPERM') {
                    yield ioUtil.chmod(destFile, '0666');
                    yield ioUtil.unlink(destFile);
                }
                // other errors = it doesn't exist, no work to do
            }
            // Copy over symlink; 'junction' is required for directory links on Windows.
            const symlinkFull = yield ioUtil.readlink(srcFile);
            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
        }
        else if (!(yield ioUtil.exists(destFile)) || force) {
            yield ioUtil.copyFile(srcFile, destFile);
        }
    });
}
|
|
//# sourceMappingURL=io.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2473:
|
|
/***/ (function(module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// Standard TypeScript down-level helpers emitted by tsc. Generated code —
// do not hand-edit.
// __awaiter: drives a generator as though it were an async function.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// __importStar: emulates `import * as ns` for CommonJS modules by copying
// own enumerable properties and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const semver = __importStar(__nccwpck_require__(562));
|
|
const core_1 = __nccwpck_require__(2186);
|
|
// needs to be require for core node modules to be mocked
|
|
/* eslint @typescript-eslint/no-require-imports: 0 */
|
|
const os = __nccwpck_require__(2087);
|
|
const cp = __nccwpck_require__(3129);
|
|
const fs = __nccwpck_require__(5747);
|
|
// Scans manifest `candidates` for the first release whose version satisfies
// `versionSpec` (and stability flag, when requested) and that ships a file
// for the current platform/arch. Returns a clone of the matching candidate
// with `files` narrowed to the single matching file, or undefined.
function _findMatch(versionSpec, stable, candidates, archFilter) {
    return __awaiter(this, void 0, void 0, function* () {
        const platFilter = os.platform();
        let result;
        let match;
        let file;
        for (const candidate of candidates) {
            const version = candidate.version;
            core_1.debug(`check ${version} satisfies ${versionSpec}`);
            if (semver.satisfies(version, versionSpec) &&
                (!stable || candidate.stable === stable)) {
                file = candidate.files.find(item => {
                    core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
                    let chk = item.arch === archFilter && item.platform === platFilter;
                    if (chk && item.platform_version) {
                        // Called via module.exports so tests can mock it.
                        const osVersion = module.exports._getOsVersion();
                        if (osVersion === item.platform_version) {
                            chk = true;
                        }
                        else {
                            // Fall back to semver range matching on the OS version.
                            chk = semver.satisfies(osVersion, item.platform_version);
                        }
                    }
                    return chk;
                });
                if (file) {
                    core_1.debug(`matched ${candidate.version}`);
                    match = candidate;
                    break;
                }
            }
        }
        if (match && file) {
            // clone since we're mutating the file list to be only the file that matches
            result = Object.assign({}, match);
            result.files = [file];
        }
        return result;
    });
}
|
|
exports._findMatch = _findMatch;
|
|
// Returns the host OS version string: `sw_vers` output on macOS, the
// DISTRIB_RELEASE value from /etc/lsb-release on Linux, '' elsewhere.
// NOTE(review): the macOS value retains a trailing newline from execSync —
// confirm downstream semver comparisons tolerate that.
function _getOsVersion() {
    // TODO: add windows and other linux, arm variants
    // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
    const plat = os.platform();
    let version = '';
    if (plat === 'darwin') {
        version = cp.execSync('sw_vers -productVersion').toString();
    }
    else if (plat === 'linux') {
        // lsb_release process not in some containers, readfile
        // Run cat /etc/lsb-release
        // DISTRIB_ID=Ubuntu
        // DISTRIB_RELEASE=18.04
        // DISTRIB_CODENAME=bionic
        // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
        const lsbContents = module.exports._readLinuxVersionFile();
        if (lsbContents) {
            const lines = lsbContents.split('\n');
            for (const line of lines) {
                const parts = line.split('=');
                if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') {
                    version = parts[1].trim();
                    break;
                }
            }
        }
    }
    return version;
}
|
|
exports._getOsVersion = _getOsVersion;
|
|
/**
 * Reads /etc/lsb-release verbatim, or returns '' when the file is absent.
 * Exported separately so callers (and tests) can stub the file read.
 */
function _readLinuxVersionFile() {
    const lsbReleasePath = '/etc/lsb-release';
    if (!fs.existsSync(lsbReleasePath)) {
        return '';
    }
    return fs.readFileSync(lsbReleasePath).toString();
}
|
|
exports._readLinuxVersionFile = _readLinuxVersionFile;
|
|
//# sourceMappingURL=manifest.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8279:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// Standard TypeScript down-level helpers emitted by tsc. Generated code —
// do not hand-edit.
// __awaiter: drives a generator as though it were an async function.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// __importStar: emulates `import * as ns` for CommonJS modules by copying
// own enumerable properties and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const core = __importStar(__nccwpck_require__(2186));
|
|
/**
|
|
* Internal class for retries
|
|
*/
|
|
class RetryHelper {
    /**
     * @param maxAttempts total number of tries (must be >= 1)
     * @param minSeconds  lower bound of the random inter-attempt delay (floored)
     * @param maxSeconds  upper bound of the random inter-attempt delay (floored)
     */
    constructor(maxAttempts, minSeconds, maxSeconds) {
        if (maxAttempts < 1) {
            throw new Error('max attempts should be greater than or equal to 1');
        }
        this.maxAttempts = maxAttempts;
        // Delay bounds are whole seconds; fractional inputs are floored.
        this.minSeconds = Math.floor(minSeconds);
        this.maxSeconds = Math.floor(maxSeconds);
        if (this.minSeconds > this.maxSeconds) {
            throw new Error('min seconds should be less than or equal to max seconds');
        }
    }
    /**
     * Runs `action` up to maxAttempts times, sleeping a random number of
     * seconds between attempts. `isRetryable(err)` returning false aborts
     * immediately; errors on the final attempt always propagate.
     */
    async execute(action, isRetryable) {
        // Every attempt except the last: swallow retryable errors and sleep.
        for (let attempt = 1; attempt < this.maxAttempts; attempt++) {
            try {
                return await action();
            }
            catch (err) {
                if (isRetryable && !isRetryable(err)) {
                    throw err;
                }
                core.info(err.message);
            }
            const seconds = this.getSleepAmount();
            core.info(`Waiting ${seconds} seconds before trying again`);
            await this.sleep(seconds);
        }
        // Final attempt: let any error reach the caller.
        return await action();
    }
    // Uniform random integer in [minSeconds, maxSeconds].
    getSleepAmount() {
        const span = this.maxSeconds - this.minSeconds + 1;
        return Math.floor(Math.random() * span) + this.minSeconds;
    }
    async sleep(seconds) {
        return new Promise(resolve => setTimeout(resolve, seconds * 1000));
    }
}
|
|
exports.RetryHelper = RetryHelper;
|
|
//# sourceMappingURL=retry-helper.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7784:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// Standard TypeScript down-level helpers emitted by tsc. Generated code —
// do not hand-edit.
// __awaiter: drives a generator as though it were an async function.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// __importStar: emulates `import * as ns` for CommonJS modules by copying
// own enumerable properties and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
// __importDefault: emulates `import x from` for CommonJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const core = __importStar(__nccwpck_require__(2186));
|
|
const io = __importStar(__nccwpck_require__(7436));
|
|
const fs = __importStar(__nccwpck_require__(5747));
|
|
const mm = __importStar(__nccwpck_require__(2473));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const path = __importStar(__nccwpck_require__(5622));
|
|
const httpm = __importStar(__nccwpck_require__(9925));
|
|
const semver = __importStar(__nccwpck_require__(562));
|
|
const stream = __importStar(__nccwpck_require__(2413));
|
|
const util = __importStar(__nccwpck_require__(1669));
|
|
const v4_1 = __importDefault(__nccwpck_require__(824));
|
|
const exec_1 = __nccwpck_require__(1514);
|
|
const assert_1 = __nccwpck_require__(2357);
|
|
const retry_helper_1 = __nccwpck_require__(8279);
|
|
/**
 * Error thrown for a non-200 download response; carries the offending
 * status code so retry logic can decide whether the failure is transient.
 */
class HTTPError extends Error {
    constructor(httpStatusCode) {
        super(`Unexpected HTTP response: ${httpStatusCode}`);
        // Restore the prototype chain — needed when ES2015 classes are
        // down-compiled so `instanceof HTTPError` keeps working.
        Object.setPrototypeOf(this, new.target.prototype);
        // Retained for the retryability check in downloadTool.
        this.httpStatusCode = httpStatusCode;
    }
}
|
|
exports.HTTPError = HTTPError;
|
|
// Platform flags used for extraction dispatch, and the User-Agent string
// sent with download requests (see downloadToolAttempt).
const IS_WINDOWS = process.platform === 'win32';
const IS_MAC = process.platform === 'darwin';
const userAgent = 'actions/tool-cache';
|
|
/**
|
|
* Download a tool from an url and stream it into a file
|
|
*
|
|
* @param url url of tool to download
|
|
* @param dest path to download tool
|
|
* @param auth authorization header
|
|
* @returns path to downloaded tool
|
|
*/
|
|
function downloadTool(url, dest, auth) {
    return __awaiter(this, void 0, void 0, function* () {
        // Default destination: a random file name under the temp directory.
        // (_getTempDirectory/_getGlobal are defined elsewhere in this bundle;
        // v4_1.default presumably generates a UUID — TODO confirm.)
        dest = dest || path.join(_getTempDirectory(), v4_1.default());
        yield io.mkdirP(path.dirname(dest));
        core.debug(`Downloading ${url}`);
        core.debug(`Destination ${dest}`);
        const maxAttempts = 3;
        // Retry delays are overridable via globals so tests run fast.
        const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
        const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
        const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
        return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            return yield downloadToolAttempt(url, dest || '', auth);
        }), (err) => {
            if (err instanceof HTTPError && err.httpStatusCode) {
                // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
                if (err.httpStatusCode < 500 &&
                    err.httpStatusCode !== 408 &&
                    err.httpStatusCode !== 429) {
                    return false;
                }
            }
            // Otherwise retry
            return true;
        });
    });
}
|
|
exports.downloadTool = downloadTool;
|
|
// Single download attempt: GETs `url` and streams the body to `dest`.
// Throws HTTPError on a non-200 status so downloadTool can decide whether
// to retry; any partially written file is removed before rethrowing.
function downloadToolAttempt(url, dest, auth) {
    return __awaiter(this, void 0, void 0, function* () {
        // A leftover destination means a previous attempt was not cleaned up.
        if (fs.existsSync(dest)) {
            throw new Error(`Destination file path ${dest} already exists`);
        }
        // Get the response headers; retries are handled by the caller, not the client.
        const http = new httpm.HttpClient(userAgent, [], {
            allowRetries: false
        });
        let headers;
        if (auth) {
            core.debug('set auth');
            headers = {
                authorization: auth
            };
        }
        const response = yield http.get(url, headers);
        if (response.message.statusCode !== 200) {
            const err = new HTTPError(response.message.statusCode);
            core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
            throw err;
        }
        // Download the response body
        const pipeline = util.promisify(stream.pipeline);
        // Test hook: allows substituting the response stream.
        const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
        const readStream = responseMessageFactory();
        let succeeded = false;
        try {
            yield pipeline(readStream, fs.createWriteStream(dest));
            core.debug('download complete');
            succeeded = true;
            return dest;
        }
        finally {
            // Error, delete dest before retry
            if (!succeeded) {
                core.debug('download failed');
                try {
                    yield io.rmRF(dest);
                }
                catch (err) {
                    // Best-effort cleanup; failure to delete is only logged.
                    core.debug(`Failed to delete '${dest}'. ${err.message}`);
                }
            }
        }
    });
}
|
|
/**
|
|
* Extract a .7z file
|
|
*
|
|
* @param file path to the .7z file
|
|
* @param dest destination directory. Optional.
|
|
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
|
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
|
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
|
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
|
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
|
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
|
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
|
* to 7zr.exe can be pass to this function.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extract7z(file, dest, _7zPath) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
        assert_1.ok(file, 'parameter "file" is required');
        dest = yield _createExtractFolder(dest);
        // Both branches extract into the CWD, so chdir to dest and restore after.
        const originalCwd = process.cwd();
        process.chdir(dest);
        if (_7zPath) {
            // Caller supplied 7zr.exe (long-path capable) — invoke it directly.
            try {
                const logLevel = core.isDebug() ? '-bb1' : '-bb0';
                const args = [
                    'x',
                    logLevel,
                    '-bd',
                    '-sccUTF-8',
                    file
                ];
                const options = {
                    silent: true
                };
                yield exec_1.exec(`"${_7zPath}"`, args, options);
            }
            finally {
                process.chdir(originalCwd);
            }
        }
        else {
            // Fall back to the bundled Invoke-7zdec.ps1 script run via PowerShell.
            const escapedScript = path
                .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
                .replace(/'/g, "''")
                .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
            const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
            const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
            const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
            const args = [
                '-NoLogo',
                '-Sta',
                '-NoProfile',
                '-NonInteractive',
                '-ExecutionPolicy',
                'Unrestricted',
                '-Command',
                command
            ];
            const options = {
                silent: true
            };
            try {
                const powershellPath = yield io.which('powershell', true);
                yield exec_1.exec(`"${powershellPath}"`, args, options);
            }
            finally {
                process.chdir(originalCwd);
            }
        }
        return dest;
    });
}
|
|
exports.extract7z = extract7z;
|
|
/**
|
|
* Extract a compressed tar archive
|
|
*
|
|
* @param file path to the tar
|
|
* @param dest destination directory. Optional.
|
|
* @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractTar(file, dest, flags = 'xz') {
    return __awaiter(this, void 0, void 0, function* () {
        if (!file) {
            throw new Error("parameter 'file' is required");
        }
        // Create dest
        dest = yield _createExtractFolder(dest);
        // Determine whether GNU tar (BSD tar needs different handling on Windows)
        core.debug('Checking tar --version');
        let versionOutput = '';
        yield exec_1.exec('tar --version', [], {
            ignoreReturnCode: true,
            silent: true,
            listeners: {
                stdout: (data) => (versionOutput += data.toString()),
                stderr: (data) => (versionOutput += data.toString())
            }
        });
        core.debug(versionOutput.trim());
        const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
        // Initialize args
        // NOTE(review): when `flags` is an array it is used (and pushed to)
        // in place, so the caller's array gets mutated — confirm intentional.
        let args;
        if (flags instanceof Array) {
            args = flags;
        }
        else {
            args = [flags];
        }
        // For an array, includes('v') matches only a literal 'v' element,
        // not substrings like '-xvz'.
        if (core.isDebug() && !flags.includes('v')) {
            args.push('-v');
        }
        let destArg = dest;
        let fileArg = file;
        if (IS_WINDOWS && isGnuTar) {
            args.push('--force-local');
            destArg = dest.replace(/\\/g, '/');
            // Technically only the dest needs to have `/` but for aesthetic consistency
            // convert slashes in the file arg too.
            fileArg = file.replace(/\\/g, '/');
        }
        if (isGnuTar) {
            // Suppress warnings when using GNU tar to extract archives created by BSD tar
            args.push('--warning=no-unknown-keyword');
        }
        args.push('-C', destArg, '-f', fileArg);
        yield exec_1.exec(`tar`, args);
        return dest;
    });
}
|
|
exports.extractTar = extractTar;
|
|
/**
|
|
* Extract a xar compatible archive
|
|
*
|
|
* @param file path to the archive
|
|
* @param dest destination directory. Optional.
|
|
* @param flags flags for the xar. Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractXar(file, dest, flags = []) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');
        assert_1.ok(file, 'parameter "file" is required');
        dest = yield _createExtractFolder(dest);
        // NOTE(review): when `flags` is an array it is pushed to in place,
        // mutating the caller's array — confirm intentional.
        let args;
        if (flags instanceof Array) {
            args = flags;
        }
        else {
            args = [flags];
        }
        args.push('-x', '-C', dest, '-f', file);
        if (core.isDebug()) {
            args.push('-v');
        }
        const xarPath = yield io.which('xar', true);
        // _unique (defined elsewhere in this bundle) de-duplicates the args.
        yield exec_1.exec(`"${xarPath}"`, _unique(args));
        return dest;
    });
}
|
|
exports.extractXar = extractXar;
|
|
/**
|
|
* Extract a zip
|
|
*
|
|
* @param file path to the zip
|
|
* @param dest destination directory. Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractZip(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!file) {
            throw new Error("parameter 'file' is required");
        }
        const targetDir = yield _createExtractFolder(dest);
        // PowerShell-based extraction on Windows, the `unzip` binary elsewhere.
        const extractor = IS_WINDOWS ? extractZipWin : extractZipNix;
        yield extractor(file, targetDir);
        return targetDir;
    });
}
|
|
exports.extractZip = extractZip;
|
|
// Windows zip extraction via PowerShell's System.IO.Compression.ZipFile.
function extractZipWin(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        // build the powershell command
        const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
        const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
        const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
        // run powershell
        const powershellPath = yield io.which('powershell', true);
        const args = [
            '-NoLogo',
            '-Sta',
            '-NoProfile',
            '-NonInteractive',
            '-ExecutionPolicy',
            'Unrestricted',
            '-Command',
            command
        ];
        yield exec_1.exec(`"${powershellPath}"`, args);
    });
}
|
|
// POSIX zip extraction: shells out to `unzip`, quietly unless debug logging is on.
function extractZipNix(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        const unzipPath = yield io.which('unzip', true);
        const unzipArgs = core.isDebug() ? [file] : ['-q', file];
        yield exec_1.exec(`"${unzipPath}"`, unzipArgs, { cwd: dest });
    });
}
|
|
/**
|
|
* Caches a directory and installs it into the tool cacheDir
|
|
*
|
|
* @param sourceDir the directory to cache into tools
|
|
* @param tool tool name
|
|
* @param version version of the tool. semver format
|
|
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
|
*/
|
|
function cacheDir(sourceDir, tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        // Normalize to a clean semver when possible; otherwise keep as given.
        version = semver.clean(version) || version;
        arch = arch || os.arch();
        core.debug(`Caching tool ${tool} ${version} ${arch}`);
        core.debug(`source dir: ${sourceDir}`);
        if (!fs.statSync(sourceDir).isDirectory()) {
            throw new Error('sourceDir is not a directory');
        }
        // Create the tool dir (_createToolPath is defined elsewhere in this bundle)
        const destPath = yield _createToolPath(tool, version, arch);
        // copy each child item. do not move. move can fail on Windows
        // due to anti-virus software having an open handle on a file.
        for (const itemName of fs.readdirSync(sourceDir)) {
            const s = path.join(sourceDir, itemName);
            yield io.cp(s, destPath, { recursive: true });
        }
        // write .complete — marks the cache entry as fully populated for find()
        _completeToolPath(tool, version, arch);
        return destPath;
    });
}
|
|
exports.cacheDir = cacheDir;
|
|
/**
|
|
* Caches a downloaded file (GUID) and installs it
|
|
* into the tool cache with a given targetName
|
|
*
|
|
* @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
|
|
* @param targetFile the name of the file name in the tools directory
|
|
* @param tool tool name
|
|
* @param version version of the tool. semver format
|
|
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
|
*/
|
|
function cacheFile(sourceFile, targetFile, tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        // Normalize to a clean semver when possible; otherwise keep as given.
        version = semver.clean(version) || version;
        arch = arch || os.arch();
        core.debug(`Caching tool ${tool} ${version} ${arch}`);
        core.debug(`source file: ${sourceFile}`);
        if (!fs.statSync(sourceFile).isFile()) {
            throw new Error('sourceFile is not a file');
        }
        // create the tool dir (_createToolPath is defined elsewhere in this bundle)
        const destFolder = yield _createToolPath(tool, version, arch);
        // copy instead of move. move can fail on Windows due to
        // anti-virus software having an open handle on a file.
        const destPath = path.join(destFolder, targetFile);
        core.debug(`destination file ${destPath}`);
        yield io.cp(sourceFile, destPath);
        // write .complete — marks the cache entry as fully populated for find()
        _completeToolPath(tool, version, arch);
        return destFolder;
    });
}
|
|
exports.cacheFile = cacheFile;
|
|
/**
|
|
* Finds the path to a tool version in the local installed tool cache
|
|
*
|
|
* @param toolName name of the tool
|
|
* @param versionSpec version of the tool
|
|
* @param arch optional arch. defaults to arch of computer
|
|
*/
|
|
function find(toolName, versionSpec, arch) {
    if (!toolName) {
        throw new Error('toolName parameter is required');
    }
    if (!versionSpec) {
        throw new Error('versionSpec parameter is required');
    }
    arch = arch || os.arch();
    // attempt to resolve an explicit version from a range spec
    // (_isExplicitVersion/_evaluateVersions are defined elsewhere in this bundle)
    if (!_isExplicitVersion(versionSpec)) {
        const localVersions = findAllVersions(toolName, arch);
        const match = _evaluateVersions(localVersions, versionSpec);
        versionSpec = match;
    }
    // check for the explicit version in the cache
    let toolPath = '';
    if (versionSpec) {
        versionSpec = semver.clean(versionSpec) || '';
        const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
        core.debug(`checking cache: ${cachePath}`);
        // The sibling `.complete` marker proves the cache entry finished writing.
        if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
            core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
            toolPath = cachePath;
        }
        else {
            core.debug('not found');
        }
    }
    // '' signals a cache miss to the caller.
    return toolPath;
}
|
|
exports.find = find;
|
|
/**
 * Finds the paths to all versions of a tool that are installed in the local tool cache
 *
 * @param toolName name of the tool
 * @param arch optional arch. defaults to arch of computer
 * @returns array of installed explicit version strings (may be empty)
 */
function findAllVersions(toolName, arch) {
    const versions = [];
    arch = arch || os.arch();
    const toolPath = path.join(_getCacheDirectory(), toolName);
    if (fs.existsSync(toolPath)) {
        const children = fs.readdirSync(toolPath);
        for (const child of children) {
            // Only directories whose names are fully-qualified semver count.
            if (_isExplicitVersion(child)) {
                const fullPath = path.join(toolPath, child, arch || '');
                // Require the completion marker so half-written caches are skipped.
                if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
                    versions.push(child);
                }
            }
        }
    }
    return versions;
}
exports.findAllVersions = findAllVersions;
|
|
/**
 * Downloads `versions-manifest.json` from the given GitHub repository branch
 * (via the Git Trees API) and returns the parsed release list.
 *
 * @param owner repository owner
 * @param repo repository name
 * @param auth optional authorization header value (e.g. 'token ...')
 * @param branch branch to read the manifest from, defaults to 'master'
 * @returns parsed manifest entries, or [] when the manifest is missing or invalid
 */
function getManifestFromRepo(owner, repo, auth, branch = 'master') {
    return __awaiter(this, void 0, void 0, function* () {
        let releases = [];
        const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;
        const http = new httpm.HttpClient('tool-cache');
        const headers = {};
        if (auth) {
            core.debug('set auth');
            headers.authorization = auth;
        }
        const response = yield http.getJson(treeUrl, headers);
        if (!response.result) {
            return releases;
        }
        // Locate the blob URL of versions-manifest.json in the branch tree.
        let manifestUrl = '';
        for (const item of response.result.tree) {
            if (item.path === 'versions-manifest.json') {
                manifestUrl = item.url;
                break;
            }
        }
        // Fix: previously an HTTP GET was issued with an empty URL when the
        // repo had no versions-manifest.json, producing a confusing network
        // error; return the empty release list instead.
        if (!manifestUrl) {
            core.debug('versions-manifest.json not found in repo tree');
            return releases;
        }
        // Request the raw blob contents rather than the JSON-wrapped blob.
        headers['accept'] = 'application/vnd.github.VERSION.raw';
        let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();
        if (versionsRaw) {
            // shouldn't be needed but protects against invalid json saved with BOM
            versionsRaw = versionsRaw.replace(/^\uFEFF/, '');
            try {
                releases = JSON.parse(versionsRaw);
            }
            catch (_a) {
                core.debug('Invalid json');
            }
        }
        return releases;
    });
}
exports.getManifestFromRepo = getManifestFromRepo;
|
|
/**
 * Finds the best manifest entry matching a version spec, architecture and
 * stability preference.
 *
 * @param versionSpec semver range to satisfy
 * @param stable whether to prefer stable releases
 * @param manifest release list (e.g. from getManifestFromRepo)
 * @param archFilter architecture to match, defaults to the current machine
 * @returns the matched release, or undefined when nothing satisfies the spec
 */
function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {
    return __awaiter(this, void 0, void 0, function* () {
        // Thin wrapper around the manifest helper's internal matcher.
        return yield mm._findMatch(versionSpec, stable, manifest, archFilter);
    });
}
exports.findFromManifest = findFromManifest;
|
|
/**
 * Ensures an extraction destination exists, creating a GUID-named folder
 * under the runner temp directory when none is supplied.
 *
 * @param dest optional destination folder
 * @returns the (created) destination folder path
 */
function _createExtractFolder(dest) {
    return __awaiter(this, void 0, void 0, function* () {
        // Fall back to a fresh uuid-v4 directory name under RUNNER_TEMP.
        const target = dest ? dest : path.join(_getTempDirectory(), v4_1.default());
        yield io.mkdirP(target);
        return target;
    });
}
|
|
/**
 * Creates a clean cache folder for tool/version/arch, removing any stale
 * contents and the stale `.complete` marker first.
 *
 * @param tool tool name
 * @param version tool version (cleaned to semver when possible)
 * @param arch optional architecture segment
 * @returns the freshly created folder path
 */
function _createToolPath(tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        const normalizedVersion = semver.clean(version) || version;
        const folderPath = path.join(_getCacheDirectory(), tool, normalizedVersion, arch || '');
        core.debug(`destination ${folderPath}`);
        const markerPath = `${folderPath}.complete`;
        // Wipe both the folder and its completion marker so a partially
        // written cache entry can never be mistaken for a finished one.
        yield io.rmRF(folderPath);
        yield io.rmRF(markerPath);
        yield io.mkdirP(folderPath);
        return folderPath;
    });
}
|
|
/**
 * Marks a cached tool entry as complete by writing an empty
 * `<folder>.complete` marker next to it (the layout mirrors _createToolPath).
 *
 * @param tool tool name
 * @param version tool version (cleaned to semver when possible)
 * @param arch optional architecture segment
 */
function _completeToolPath(tool, version, arch) {
    const normalizedVersion = semver.clean(version) || version;
    const cachedDir = path.join(_getCacheDirectory(), tool, normalizedVersion, arch || '');
    fs.writeFileSync(`${cachedDir}.complete`, '');
    core.debug('finished caching tool');
}
|
|
/**
 * Determines whether a version spec names one exact version (e.g. '1.2.3')
 * rather than a range (e.g. '1.x').
 *
 * @param versionSpec version string to inspect
 * @returns true when the cleaned spec is a fully-qualified semver version
 */
function _isExplicitVersion(versionSpec) {
    const cleaned = semver.clean(versionSpec) || '';
    core.debug(`isExplicit: ${cleaned}`);
    const explicit = semver.valid(cleaned) != null;
    core.debug(`explicit? ${explicit}`);
    return explicit;
}
|
|
/**
 * Picks the highest installed version that satisfies a version spec.
 *
 * @param versions candidate version strings
 * @param versionSpec semver range to satisfy
 * @returns the best matching version, or '' when none satisfies the spec
 */
function _evaluateVersions(versions, versionSpec) {
    let version = '';
    core.debug(`evaluating ${versions.length} versions`);
    // Sort ascending by semver precedence (equal versions fall into the -1
    // branch, which is fine for picking a maximum).
    versions = versions.sort((a, b) => {
        if (semver.gt(a, b)) {
            return 1;
        }
        return -1;
    });
    // Scan from highest to lowest and take the first satisfying version.
    for (let i = versions.length - 1; i >= 0; i--) {
        const potential = versions[i];
        const satisfied = semver.satisfies(potential, versionSpec);
        if (satisfied) {
            version = potential;
            break;
        }
    }
    if (version) {
        core.debug(`matched: ${version}`);
    }
    else {
        core.debug('match not found');
    }
    return version;
}
|
|
/**
 * Gets RUNNER_TOOL_CACHE.
 *
 * @returns the tool cache root directory
 * @throws AssertionError when RUNNER_TOOL_CACHE is unset or empty
 */
function _getCacheDirectory() {
    const cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '';
    assert_1.ok(cacheRoot, 'Expected RUNNER_TOOL_CACHE to be defined');
    return cacheRoot;
}
|
|
/**
 * Gets RUNNER_TEMP.
 *
 * @returns the runner temp directory
 * @throws AssertionError when RUNNER_TEMP is unset or empty
 */
function _getTempDirectory() {
    const tempRoot = process.env['RUNNER_TEMP'] || '';
    assert_1.ok(tempRoot, 'Expected RUNNER_TEMP to be defined');
    return tempRoot;
}
|
|
/**
 * Reads a value from the Node `global` object with a fallback.
 *
 * @param key property name on `global`
 * @param defaultValue value returned when the global is undefined
 * @returns the global's value, or defaultValue when it is undefined
 */
function _getGlobal(key, defaultValue) {
    /* eslint-disable @typescript-eslint/no-explicit-any */
    const found = global[key];
    /* eslint-enable @typescript-eslint/no-explicit-any */
    // Only `undefined` triggers the fallback; falsy values are returned as-is.
    return found === undefined ? defaultValue : found;
}
|
|
/**
 * Returns an array of unique values, preserving first-occurrence order.
 * @param values Values to make unique.
 * @returns new array with duplicates removed
 */
function _unique(values) {
    const distinct = new Set(values);
    return [...distinct];
}
|
|
//# sourceMappingURL=tool-cache.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 562:
|
|
/***/ ((module, exports) => {
|
|
|
|
// Vendored semver: the module's default export is the SemVer constructor.
exports = module.exports = SemVer

// `debug` logs to the console only when NODE_DEBUG mentions "semver";
// otherwise it is a no-op so hot paths pay nothing.
var debug
/* istanbul ignore next */
if (typeof process === 'object' &&
    process.env &&
    process.env.NODE_DEBUG &&
    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments, 0)
    args.unshift('SEMVER')
    console.log.apply(console, args)
  }
} else {
  debug = function () {}
}
|
|
|
|
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'

// Longest version string parse() will accept.
var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re
// `src` keeps the source strings, `t` maps token names to slot indexes.
var re = exports.re = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0

// Registers a token name and assigns it the next regexp slot index.
function tok (n) {
  t[n] = R++
}
|
|
|
|
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// NOTE: definition order matters — each src[] entry may reference
// previously defined entries, and tok() assigns indexes sequentially.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.

tok('MAINVERSION')
src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
                     '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
                     '(' + src[t.NUMERICIDENTIFIER] + ')'

tok('MAINVERSIONLOOSE')
src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
                          '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
                          '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

tok('PRERELEASEIDENTIFIER')
src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
                              '|' + src[t.NONNUMERICIDENTIFIER] + ')'

tok('PRERELEASEIDENTIFIERLOOSE')
src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
                                   '|' + src[t.NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

tok('PRERELEASE')
src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
                    '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'

tok('PRERELEASELOOSE')
src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
                         '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

tok('BUILD')
src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
               '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

tok('FULL')
tok('FULLPLAIN')
src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
                   src[t.PRERELEASE] + '?' +
                   src[t.BUILD] + '?'

src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
tok('LOOSEPLAIN')
src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
                    src[t.PRERELEASELOOSE] + '?' +
                    src[t.BUILD] + '?'

tok('LOOSE')
src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'

// Comparator operator: optional < or >, optionally followed by =.
tok('GTLT')
src[t.GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
tok('XRANGEIDENTIFIERLOOSE')
src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
tok('XRANGEIDENTIFIER')
src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'

tok('XRANGEPLAIN')
src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
                     '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
                     '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
                     '(?:' + src[t.PRERELEASE] + ')?' +
                     src[t.BUILD] + '?' +
                     ')?)?'

tok('XRANGEPLAINLOOSE')
src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
                          '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
                          '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
                          '(?:' + src[t.PRERELEASELOOSE] + ')?' +
                          src[t.BUILD] + '?' +
                          ')?)?'

tok('XRANGE')
src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
tok('XRANGELOOSE')
src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
tok('COERCE')
src[t.COERCE] = '(^|[^\\d])' +
                '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
                '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
                '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
                '(?:$|[^\\d])'
tok('COERCERTL')
// Global flag so coerce() can scan right-to-left via lastIndex.
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
tok('LONETILDE')
src[t.LONETILDE] = '(?:~>?)'

tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
tok('TILDELOOSE')
src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
tok('LONECARET')
src[t.LONECARET] = '(?:\\^)'

tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
var caretTrimReplace = '$1^'

tok('CARET')
src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
tok('CARETLOOSE')
src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
tok('COMPARATORLOOSE')
src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
tok('COMPARATOR')
src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
tok('COMPARATORTRIM')
src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
                        '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
tok('HYPHENRANGE')
src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
                     '\\s+-\\s+' +
                     '(' + src[t.XRANGEPLAIN] + ')' +
                     '\\s*$'

tok('HYPHENRANGELOOSE')
src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
                          '\\s+-\\s+' +
                          '(' + src[t.XRANGEPLAINLOOSE] + ')' +
                          '\\s*$'

// Star ranges basically just allow anything at all.
tok('STAR')
src[t.STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
|
|
|
|
exports.parse = parse
// Parses `version` into a SemVer instance. Unlike the constructor this
// never throws: invalid, non-string, or over-long input returns null.
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call style: a boolean `options` means `loose`.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  if (version.length > MAX_LENGTH) {
    return null
  }

  var r = options.loose ? re[t.LOOSE] : re[t.FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
|
|
|
|
exports.valid = valid
// Returns the normalized version string, or null when `version` is not
// a parseable semver.
function valid (version, options) {
  var parsed = parse(version, options)
  if (parsed) {
    return parsed.version
  }
  return null
}
|
|
|
|
exports.clean = clean
// Strips leading '=' / 'v' markers and surrounding whitespace, then
// returns the normalized version string (or null when unparseable).
function clean (version, options) {
  var stripped = version.trim().replace(/^[=v]+/, '')
  var parsed = parse(stripped, options)
  return parsed ? parsed.version : null
}
|
|
|
|
exports.SemVer = SemVer

// SemVer constructor: parses `version` (string or SemVer) into
// major/minor/patch/prerelease/build components. Throws TypeError on
// invalid input (use parse() for a non-throwing variant).
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call style: a boolean `options` means `loose`.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    // Reuse the instance when looseness matches, else reparse its string.
    if (version.loose === options.loose) {
      return version
    } else {
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  // Support calling without `new`.
  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      // Non-numeric (or unsafe-to-number) identifiers stay as strings.
      return id
    })
  }

  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
|
|
|
|
// Rebuilds the canonical version string (major.minor.patch[-prerelease])
// from the parsed components, stores it on `this.version`, and returns it.
// Build metadata is intentionally excluded.
SemVer.prototype.format = function () {
  var formatted = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length) {
    formatted += '-' + this.prerelease.join('.')
  }
  this.version = formatted
  return this.version
}
|
|
|
|
// Canonical string form, e.g. '1.2.3-beta.1' (kept current by format()).
SemVer.prototype.toString = function () {
  return this.version
}
|
|
|
|
// Full precedence comparison per the semver spec: main version first,
// then prerelease. Returns negative/0/positive.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return this.compareMain(other) || this.comparePre(other)
}
|
|
|
|
// Compares only major.minor.patch, ignoring prerelease and build.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return compareIdentifiers(this.major, other.major) ||
         compareIdentifiers(this.minor, other.minor) ||
         compareIdentifiers(this.patch, other.patch)
}
|
|
|
|
// Compares only the prerelease portions, identifier by identifier.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  if (this.prerelease.length && !other.prerelease.length) {
    return -1
  } else if (!this.prerelease.length && other.prerelease.length) {
    return 1
  } else if (!this.prerelease.length && !other.prerelease.length) {
    return 0
  }

  // Walk the identifier lists in parallel; a shorter list that is a
  // prefix of the other sorts lower (undefined vs defined cases below).
  var i = 0
  do {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    } else if (b === undefined) {
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i)
}
|
|
|
|
// Compares build-metadata identifiers (used only as a tiebreaker by
// compareBuild()/sort(); build metadata has no precedence in the spec).
SemVer.prototype.compareBuild = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  var i = 0
  do {
    var a = this.build[i]
    var b = other.build[i]
    // NOTE: the 'prerelease compare' label below is a copy-paste from
    // comparePre kept as-is (debug-only output, matches upstream).
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    } else if (b === undefined) {
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i)
}
|
|
|
|
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
//
// Mutates this instance in place (components, version, raw) and returns it.
// `identifier`, when given, names the prerelease channel (e.g. 'beta').
// Throws on an unknown `release` keyword.
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        // Increment the right-most numeric prerelease identifier;
        // i lands at -2 when one was found, -1 when none was.
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            i = -2
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  this.format()
  this.raw = this.version
  return this
}
|
|
|
|
exports.inc = inc
// Functional wrapper around SemVer#inc. Returns the bumped version
// string, or null when `version` is invalid.
function inc (version, release, loose, identifier) {
  // Legacy 3-arg call style: (version, release, identifier).
  if (typeof (loose) === 'string') {
    identifier = loose
    loose = undefined
  }

  try {
    return new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    return null
  }
}
|
|
|
|
exports.diff = diff
// Names the most significant differing component of two versions
// ('major', 'minor', 'patch', pre-prefixed variants, or 'prerelease').
// Returns null when the versions are equal.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  } else {
    var v1 = parse(version1)
    var v2 = parse(version2)
    var prefix = ''
    if (v1.prerelease.length || v2.prerelease.length) {
      prefix = 'pre'
      // NOTE: declared with `var` inside the if on purpose — hoisting
      // makes it visible (as undefined) at the return below when neither
      // version has a prerelease.
      var defaultResult = 'prerelease'
    }
    for (var key in v1) {
      if (key === 'major' || key === 'minor' || key === 'patch') {
        if (v1[key] !== v2[key]) {
          return prefix + key
        }
      }
    }
    return defaultResult // may be undefined
  }
}
|
|
|
|
exports.compareIdentifiers = compareIdentifiers

var numeric = /^[0-9]+$/
// Spec rule 11: numeric identifiers compare numerically and always sort
// lower than alphanumeric ones; alphanumerics compare lexically.
function compareIdentifiers (a, b) {
  var anum = numeric.test(a)
  var bnum = numeric.test(b)

  if (anum && bnum) {
    a = +a
    b = +b
  }

  return a === b ? 0
    : (anum && !bnum) ? -1
    : (bnum && !anum) ? 1
    : a < b ? -1
    : 1
}
|
|
|
|
exports.rcompareIdentifiers = rcompareIdentifiers
// Reverse identifier comparison (descending order).
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}
|
|
|
|
exports.major = major
// Major component of a version; throws when `a` is invalid.
function major (a, loose) {
  return new SemVer(a, loose).major
}

exports.minor = minor
// Minor component of a version; throws when `a` is invalid.
function minor (a, loose) {
  return new SemVer(a, loose).minor
}

exports.patch = patch
// Patch component of a version; throws when `a` is invalid.
function patch (a, loose) {
  return new SemVer(a, loose).patch
}
|
|
|
|
exports.compare = compare
// Precedence comparison of two version strings/instances (-1/0/1).
function compare (a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}

exports.compareLoose = compareLoose
// compare() with loose parsing forced on.
function compareLoose (a, b) {
  return compare(a, b, true)
}

exports.compareBuild = compareBuild
// Like compare(), but breaks ties using build metadata identifiers.
function compareBuild (a, b, loose) {
  var versionA = new SemVer(a, loose)
  var versionB = new SemVer(b, loose)
  return versionA.compare(versionB) || versionA.compareBuild(versionB)
}

exports.rcompare = rcompare
// Reverse of compare() (descending order).
function rcompare (a, b, loose) {
  return compare(b, a, loose)
}

exports.sort = sort
// Sorts a list of versions ascending, in place (Array#sort semantics).
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(a, b, loose)
  })
}

exports.rsort = rsort
// Sorts a list of versions descending, in place.
function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(b, a, loose)
  })
}
|
|
|
|
exports.gt = gt
// a > b by semver precedence.
function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}

exports.lt = lt
// a < b by semver precedence.
function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}

exports.eq = eq
// a == b by semver precedence (build metadata ignored).
function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}

exports.neq = neq
// a != b by semver precedence.
function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}

exports.gte = gte
// a >= b by semver precedence.
function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}

exports.lte = lte
// a <= b by semver precedence.
function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}
|
|
|
|
exports.cmp = cmp
// Applies a comparison operator string ('===', '!==', '', '=', '==',
// '!=', '>', '>=', '<', '<=') to two versions. '===' / '!==' compare the
// raw version strings; the rest use semver precedence. Throws on an
// unknown operator.
function cmp (a, op, b, loose) {
  switch (op) {
    case '===':
      if (typeof a === 'object')
        a = a.version
      if (typeof b === 'object')
        b = b.version
      return a === b

    case '!==':
      if (typeof a === 'object')
        a = a.version
      if (typeof b === 'object')
        b = b.version
      return a !== b

    case '':
    case '=':
    case '==':
      return eq(a, b, loose)

    case '!=':
      return neq(a, b, loose)

    case '>':
      return gt(a, b, loose)

    case '>=':
      return gte(a, b, loose)

    case '<':
      return lt(a, b, loose)

    case '<=':
      return lte(a, b, loose)

    default:
      throw new TypeError('Invalid operator: ' + op)
  }
}
|
|
|
|
exports.Comparator = Comparator
// A single comparator like '>=1.2.3' (operator + version), or '' which
// matches any version.
function Comparator (comp, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call style: a boolean `options` means `loose`.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    // Reuse when looseness matches, else reparse its string form.
    if (comp.loose === !!options.loose) {
      return comp
    } else {
      comp = comp.value
    }
  }

  // Support calling without `new`.
  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  this.parse(comp)

  if (this.semver === ANY) {
    this.value = ''
  } else {
    this.value = this.operator + this.semver.version
  }

  debug('comp', this)
}
|
|
|
|
// Sentinel meaning "any version" (identity-compared elsewhere).
var ANY = {}
// Splits a comparator string into `this.operator` and `this.semver`.
// Throws TypeError when the string is not a valid comparator.
Comparator.prototype.parse = function (comp) {
  var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
  var m = comp.match(r)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  // '=' is normalized to the empty (equality) operator.
  this.operator = m[1] !== undefined ? m[1] : ''
  if (this.operator === '=') {
    this.operator = ''
  }

  // if it literally is just '>' or '' then allow anything.
  if (!m[2]) {
    this.semver = ANY
  } else {
    this.semver = new SemVer(m[2], this.options.loose)
  }
}
|
|
|
|
// Normalized comparator text, e.g. '>=1.2.3' ('' for the ANY comparator).
Comparator.prototype.toString = function () {
  return this.value
}
|
|
|
|
// Tests whether `version` satisfies this single comparator. Unparseable
// version strings return false rather than throwing.
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  if (this.semver === ANY || version === ANY) {
    return true
  }

  if (typeof version === 'string') {
    try {
      version = new SemVer(version, this.options)
    } catch (er) {
      return false
    }
  }

  return cmp(version, this.operator, this.semver, this.options)
}
|
|
|
|
// True when some version can satisfy both this comparator and `comp`.
// Throws TypeError unless `comp` is a Comparator.
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  if (!options || typeof options !== 'object') {
    // Legacy call style: a boolean `options` means `loose`.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An equality (or ANY) comparator intersects anything its version
  // satisfies; handle those directly via Range/satisfies.
  if (this.operator === '') {
    if (this.value === '') {
      return true
    }
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    if (comp.value === '') {
      return true
    }
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  // Otherwise reason case-by-case about the operator directions.
  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
    (sameSemVer && differentDirectionsInclusive) ||
    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
|
|
|
|
exports.Range = Range
// A range is a disjunction ('||') of comparator sets; `this.set` is an
// array (OR) of arrays (AND) of Comparators. Throws TypeError when no
// alternative parses into at least one comparator.
function Range (range, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call style: a boolean `options` means `loose`.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (range instanceof Range) {
    // Reuse only when both parse flags match; otherwise reparse the raw.
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      return range
    } else {
      return new Range(range.raw, options)
    }
  }

  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  // Support calling without `new`.
  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
    return this.parseRange(range.trim())
  }, this).filter(function (c) {
    // throw out any that are not relevant for whatever reason
    return c.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
|
|
|
|
// Rebuilds the normalized range string ('||'-joined comparator sets),
// stores it on `this.range`, and returns it.
Range.prototype.format = function () {
  this.range = this.set.map(function (comps) {
    return comps.join(' ').trim()
  }).join('||').trim()
  return this.range
}
|
|
|
|
// Normalized range text as computed by format().
Range.prototype.toString = function () {
  return this.range
}
|
|
|
|
// Parses one '||'-free range alternative into an array of Comparators,
// desugaring hyphen/tilde/caret/x-range syntax along the way.
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()
  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)
  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[t.COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[t.CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // At this point, the range is completely trimmed and
  // ready to be split into comparators.

  var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
  var set = range.split(' ').map(function (comp) {
    return parseComparator(comp, this.options)
  }, this).join(' ').split(/\s+/)
  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }
  set = set.map(function (comp) {
    return new Comparator(comp, this.options)
  }, this)

  return set
}
|
|
|
|
// True when some alternative of this range and some alternative of
// `range` can both be satisfied by one version: both chosen AND-sets must
// themselves be satisfiable, and every comparator pair across the two
// sets must intersect.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  return this.set.some(function (thisComparators) {
    return (
      isSatisfiable(thisComparators, options) &&
      range.set.some(function (rangeComparators) {
        return (
          isSatisfiable(rangeComparators, options) &&
          thisComparators.every(function (thisComparator) {
            return rangeComparators.every(function (rangeComparator) {
              return thisComparator.intersects(rangeComparator, options)
            })
          })
        )
      })
    )
  })
}
|
|
|
|
// take a set of comparators and determine whether there
// exists a version which can satisfy it: each comparator must intersect
// with every comparator still pending before it in the list.
function isSatisfiable (comparators, options) {
  var pending = comparators.slice()
  var candidate = pending.pop()
  var ok = true

  while (ok && pending.length) {
    ok = pending.every(function (other) {
      return candidate.intersects(other, options)
    })

    candidate = pending.pop()
  }

  return ok
}
|
|
|
|
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
// Flatten a range into arrays of comparator strings, one array per
// '||'-separated alternative.
function toComparators (range, options) {
  var parsed = new Range(range, options)
  return parsed.set.map(function (comparators) {
    var joined = comparators.map(function (c) {
      return c.value
    }).join(' ').trim()
    return joined.split(' ')
  })
}
|
|
|
|
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
// Each desugaring stage is applied in order; the label passed to debug
// describes the stage that just ran.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  var stages = [
    ['caret', replaceCarets],
    ['tildes', replaceTildes],
    ['xrange', replaceXRanges],
    ['stars', replaceStars]
  ]
  stages.forEach(function (stage) {
    comp = stage[1](comp, options)
    debug(stage[0], comp)
  })
  return comp
}
|
|
|
|
// An identifier counts as a wildcard ("any") version part when it is
// missing/empty, 'x'/'X', or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  var lowered = id.toLowerCase()
  return lowered === 'x' || id === '*'
}
|
|
|
|
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0

// Desugar every tilde comparator in a space-separated clause.
function replaceTildes (comp, options) {
  return comp.trim().split(/\s+/).map(function (comp) {
    return replaceTilde(comp, options)
  }).join(' ')
}

// Desugar a single `~`/`~>` comparator into an explicit `>=lo <hi` pair
// (see the expansion table above).
function replaceTilde (comp, options) {
  var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // bare `~` / `~*`: matches anything
      ret = ''
    } else if (isX(m)) {
      // `~2` => any 2.x.x
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    } else if (pr) {
      debug('replaceTilde pr', pr)
      // keep the prerelease tag on the lower bound
      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + (+m + 1) + '.0'
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + (+m + 1) + '.0'
    }

    debug('tilde return', ret)
    return ret
  })
}
|
|
|
|
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0

// Desugar every caret comparator in a space-separated clause.
function replaceCarets (comp, options) {
  return comp.trim().split(/\s+/).map(function (comp) {
    return replaceCaret(comp, options)
  }).join(' ')
}

// Desugar a single `^` comparator. The branches below pin the left-most
// non-zero part: major 0 only allows minor/patch movement, and 0.0.z only
// allows the next patch.
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // bare `^`: matches anything
      ret = ''
    } else if (isX(m)) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      if (M === '0') {
        // ^0.2 => >=0.2.0 <0.3.0 (same as ~0.2)
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
      } else {
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      // prerelease tag stays on the lower bound
      if (M === '0') {
        if (m === '0') {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
                ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
                ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
              ' <' + (+M + 1) + '.0.0'
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3 => >=0.0.3 <0.0.4
          ret = '>=' + M + '.' + m + '.' + p +
                ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          // ^0.2.3 => >=0.2.3 <0.3.0
          ret = '>=' + M + '.' + m + '.' + p +
                ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        // ^1.2.3 => >=1.2.3 <2.0.0
        ret = '>=' + M + '.' + m + '.' + p +
              ' <' + (+M + 1) + '.0.0'
      }
    }

    debug('caret return', ret)
    return ret
  })
}
|
|
|
|
// Desugar every x-range comparator (e.g. `>=1.x`, `2.*`) in a clause.
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  return comp.split(/\s+/).map(function (comp) {
    return replaceXRange(comp, options)
  }).join(' ')
}

// Desugar one x-range comparator into plain comparators. `gtlt` is the
// leading operator (may be empty); M/m/p are the major/minor/patch parts.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    // xM/xm/xp: the major/minor/patch part (or one to its left) is a wildcard
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // `=1.x` is equivalent to `1.x`
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    // if we're including prereleases in the match, then we need
    // to fix this to -0, the lowest possible prerelease value
    pr = options.includePrerelease ? '-0' : ''

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0-0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p + pr
    } else if (xm) {
      // 1.x => >=1.0.0 <2.0.0
      ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr
    } else if (xp) {
      // 1.2.x => >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0' + pr +
        ' <' + M + '.' + (+m + 1) + '.0' + pr
    }

    debug('xRange return', ret)

    return ret
  })
}
|
|
|
|
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  var trimmed = comp.trim()
  return trimmed.replace(re[t.STAR], '')
}
|
|
|
|
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: missing parts become .0, so the bound stays inclusive.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    from = '>=' + fM + '.' + fm + '.0'
  } else {
    from = '>=' + from
  }

  // Upper bound: a partial version means "anything in that series", so the
  // bound becomes an exclusive `<` on the next series; a full version (with
  // or without prerelease) stays an inclusive `<=`.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    to = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    to = '<=' + to
  }

  return (from + ' ' + to).trim()
}
|
|
|
|
// if ANY of the sets match ALL of its comparators, then pass
// `version` may be a SemVer or a string; falsy or unparseable versions
// never match.
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    try {
      version = new SemVer(version, this.options)
    } catch (er) {
      // invalid version string: no match rather than an exception
      return false
    }
  }

  for (var i = 0; i < this.set.length; i++) {
    if (testSet(this.set[i], version, this.options)) {
      return true
    }
  }
  return false
}
|
|
|
|
// Check one AND-set of comparators against a parsed SemVer. Every
// comparator must match, and prerelease versions are additionally gated
// (below) unless options.includePrerelease is set.
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === ANY) {
        continue
      }

      // A prerelease version only matches when some comparator mentions a
      // prerelease on the exact same major.minor.patch tuple.
      if (set[i].semver.prerelease.length > 0) {
        var allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
|
|
|
|
exports.satisfies = satisfies
// Report whether `version` lies inside `range`; an unparseable range
// yields false rather than throwing.
function satisfies (version, range, options) {
  var parsedRange
  try {
    parsedRange = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsedRange.test(version)
}
|
|
|
|
exports.maxSatisfying = maxSatisfying
// Return the highest version in `versions` that satisfies `range`, or
// null when none do (or the range is invalid). The returned value is the
// original array element, not a SemVer instance.
function maxSatisfying (versions, range, options) {
  var max = null
  var maxSV = null
  try {
    var rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  versions.forEach(function (v) {
    if (rangeObj.test(v)) {
      // satisfies(v, range, options)
      if (!max || maxSV.compare(v) === -1) {
        // compare(max, v, true)
        max = v
        maxSV = new SemVer(max, options)
      }
    }
  })
  return max
}
|
|
|
|
exports.minSatisfying = minSatisfying
// Return the lowest version in `versions` that satisfies `range`, or
// null when none do (or the range is invalid). Mirror image of
// maxSatisfying above.
function minSatisfying (versions, range, options) {
  var min = null
  var minSV = null
  try {
    var rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  versions.forEach(function (v) {
    if (rangeObj.test(v)) {
      // satisfies(v, range, options)
      if (!min || minSV.compare(v) === 1) {
        // compare(min, v, true)
        min = v
        minSV = new SemVer(min, options)
      }
    }
  })
  return min
}
|
|
|
|
exports.minVersion = minVersion
// Return the lowest SemVer that could possibly match `range`, or null
// when the range excludes everything.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast paths: 0.0.0 (or its lowest prerelease) already matches.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  // Otherwise take the smallest lower bound across all alternatives.
  minver = null
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // `>X` excludes X itself: bump the patch for release versions,
          // or append `.0` to the prerelease, then treat it like `>=`.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  }

  // The candidate must still satisfy the full range (upper bounds apply).
  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
|
|
|
|
exports.validRange = validRange
// Normalize `range` to its parsed string form, or null when invalid.
function validRange (range, options) {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    var parsed = new Range(range, options)
    return parsed.range || '*'
  } catch (er) {
    return null
  }
}
|
|
|
|
// Determine if version is less than all the versions possible in the range
exports.ltr = ltr
function ltr (version, range, options) {
  var hilo = '<'
  return outside(version, range, hilo, options)
}

// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr
function gtr (version, range, options) {
  var hilo = '>'
  return outside(version, range, hilo, options)
}
|
|
|
|
exports.outside = outside
// True when `version` falls entirely outside `range` on the `hilo` side:
// '>' asks "higher than every version in range?", '<' asks "lower than
// every version in range?".
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // Select comparison helpers; the '<' case is the mirror image of '>'.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    var high = null
    var low = null

    // Find the extreme (highest/lowest) comparators of this AND-set.
    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
|
|
|
|
exports.prerelease = prerelease
// Return the prerelease identifier array of `version`, or null when the
// version is invalid or has no prerelease part.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (!parsed) {
    return null
  }
  return parsed.prerelease.length ? parsed.prerelease : null
}
|
|
|
|
exports.intersects = intersects
// True when the two ranges share at least one satisfiable version.
function intersects (r1, r2, options) {
  var first = new Range(r1, options)
  var second = new Range(r2, options)
  return first.intersects(second)
}
|
|
|
|
exports.coerce = coerce
// Coerce a loosely-version-like string (or number) into a SemVer via
// parse(). Returns null when nothing version-like can be found. With
// options.rtl, the right-most coercible substring is preferred.
function coerce (version, options) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version === 'number') {
    version = String(version)
  }

  if (typeof version !== 'string') {
    return null
  }

  options = options || {}

  var match = null
  if (!options.rtl) {
    match = version.match(re[t.COERCE])
  } else {
    // Find the right-most coercible string that does not share
    // a terminus with a more left-ward coercible string.
    // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
    //
    // Walk through the string checking with a /g regexp
    // Manually set the index so as to pick up overlapping matches.
    // Stop when we get a match that ends at the string end, since no
    // coercible string can be more right-ward without the same terminus.
    var next
    while ((next = re[t.COERCERTL].exec(version)) &&
      (!match || match.index + match[0].length !== version.length)
    ) {
      if (!match ||
            next.index + next[0].length !== match.index + match[0].length) {
        match = next
      }
      // Rewind lastIndex just past this match's leading groups so
      // overlapping candidates are still considered.
      re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
    }
    // leave it in a clean state
    re[t.COERCERTL].lastIndex = -1
  }

  if (match === null) {
    return null
  }

  // match[2..4] are the captured major/minor/patch; missing parts become '0'.
  return parse(match[2] +
    '.' + (match[3] || '0') +
    '.' + (match[4] || '0'), options)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5995:
|
|
/***/ ((module) => {
|
|
|
|
module.exports = r => {
|
|
const n = process.versions.node.split('.').map(x => parseInt(x, 10))
|
|
r = r.split('.').map(x => parseInt(x, 10))
|
|
return n[0] > r[0] || (n[0] === r[0] && (n[1] > r[1] || (n[1] === r[1] && n[2] >= r[2])))
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3338:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const mkdirsSync = __nccwpck_require__(2915).mkdirsSync
|
|
const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
// Synchronously copy `src` to `dest`. `opts` may be a filter function or
// an options object ({ overwrite/clobber, errorOnExist, dereference,
// preserveTimestamps, filter }). Path validity is checked up front via the
// `stat` helpers (defined elsewhere in this bundle).
function copySync (src, dest, opts) {
  if (typeof opts === 'function') {
    opts = { filter: opts }
  }

  opts = opts || {}
  opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
  opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber

  // Warn about using preserveTimestamps on 32-bit node
  if (opts.preserveTimestamps && process.arch === 'ia32') {
    console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n
    see https://github.com/jprichardson/node-fs-extra/issues/269`)
  }

  const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy')
  stat.checkParentPathsSync(src, srcStat, dest, 'copy')
  return handleFilterAndCopy(destStat, src, dest, opts)
}
|
|
|
|
// Apply the user filter (if any), ensure the destination's parent
// directory exists, then kick off the actual copy.
function handleFilterAndCopy (destStat, src, dest, opts) {
  if (opts.filter && !opts.filter(src, dest)) return
  const destParent = path.dirname(dest)
  const parentExists = fs.existsSync(destParent)
  if (!parentExists) {
    mkdirsSync(destParent)
  }
  return startCopy(destStat, src, dest, opts)
}
|
|
|
|
// Gate a single item on the user filter (re-checked per entry during
// directory recursion), then dispatch on the source's file type.
function startCopy (destStat, src, dest, opts) {
  const excluded = opts.filter && !opts.filter(src, dest)
  if (excluded) return
  return getStats(destStat, src, dest, opts)
}
|
|
|
|
// Stat the source (lstat unless opts.dereference) and dispatch to the
// handler for its file type. Types with no branch here (e.g. FIFOs,
// sockets) fall through and are silently skipped.
function getStats (destStat, src, dest, opts) {
  const statSync = opts.dereference ? fs.statSync : fs.lstatSync
  const srcStat = statSync(src)

  if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
  else if (srcStat.isFile() ||
           srcStat.isCharacterDevice() ||
           srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)
  else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
}
|
|
|
|
// Copy a regular file; when the destination already exists, defer to the
// overwrite/errorOnExist policy in mayCopyFile.
function onFile (srcStat, destStat, src, dest, opts) {
  if (destStat) {
    return mayCopyFile(srcStat, src, dest, opts)
  }
  return copyFile(srcStat, src, dest, opts)
}
|
|
|
|
// Destination already exists: remove it and copy fresh, raise, or
// silently skip, depending on opts.overwrite / opts.errorOnExist.
function mayCopyFile (srcStat, src, dest, opts) {
  if (opts.overwrite) {
    fs.unlinkSync(dest)
    return copyFile(srcStat, src, dest, opts)
  } else if (opts.errorOnExist) {
    throw new Error(`'${dest}' already exists`)
  }
}
|
|
|
|
// Copy the file contents, then optionally the timestamps, and always
// mirror the source's mode onto the destination.
function copyFile (srcStat, src, dest, opts) {
  fs.copyFileSync(src, dest)
  if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
  return setDestMode(dest, srcStat.mode)
}
|
|
|
|
// Preserve src's atime/mtime on dest, first forcing the owner-write bit
// on dest when the source mode lacks it.
function handleTimestamps (srcMode, src, dest) {
  // Make sure the file is writable before setting the timestamp
  // otherwise open fails with EPERM when invoked with 'r+'
  // (through utimes call)
  if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
  return setDestTimestamps(src, dest)
}
|
|
|
|
// True when the owner-write bit (0o200) is absent from the mode.
function fileIsNotWritable (srcMode) {
  const ownerWrite = srcMode & 0o200
  return ownerWrite === 0
}
|
|
|
|
// chmod `dest` with the owner-write bit forced on.
function makeFileWritable (dest, srcMode) {
  const writableMode = srcMode | 0o200
  return setDestMode(dest, writableMode)
}

// Mirror the given permission bits onto `dest`.
function setDestMode (dest, srcMode) {
  return fs.chmodSync(dest, srcMode)
}
|
|
|
|
// Copy atime/mtime from src to dest, using a fresh stat of src.
function setDestTimestamps (src, dest) {
  // The initial srcStat.atime cannot be trusted
  // because it is modified by the read(2) system call
  // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
  const updatedSrcStat = fs.statSync(src)
  return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
}
|
|
|
|
// Copy a directory: create dest when missing, refuse to clobber a
// non-directory, then recurse into the entries.
function onDir (srcStat, destStat, src, dest, opts) {
  if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
  if (destStat && !destStat.isDirectory()) {
    throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
  }
  return copyDir(src, dest, opts)
}
|
|
|
|
// Create the destination directory, copy the contents, then apply the
// source directory's mode (mode is set after the children are copied).
function mkDirAndCopy (srcMode, src, dest, opts) {
  fs.mkdirSync(dest)
  copyDir(src, dest, opts)
  return setDestMode(dest, srcMode)
}

// Copy every entry of directory `src` into `dest`.
function copyDir (src, dest, opts) {
  fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts))
}
|
|
|
|
// Copy one directory entry, re-validating the src/dest pair first.
function copyDirItem (item, src, dest, opts) {
  const srcItem = path.join(src, item)
  const destItem = path.join(dest, item)
  const checked = stat.checkPathsSync(srcItem, destItem, 'copy')
  return startCopy(checked.destStat, srcItem, destItem, opts)
}
|
|
|
|
// Copy a symlink. Relative link targets are resolved against the current
// working directory when opts.dereference is set, and copies that would
// corrupt either tree (self-subdirectory links) are rejected.
function onLink (destStat, src, dest, opts) {
  let resolvedSrc = fs.readlinkSync(src)
  if (opts.dereference) {
    resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
  }

  if (!destStat) {
    return fs.symlinkSync(resolvedSrc, dest)
  } else {
    let resolvedDest
    try {
      resolvedDest = fs.readlinkSync(dest)
    } catch (err) {
      // dest exists and is a regular file or directory,
      // Windows may throw UNKNOWN error. If dest already exists,
      // fs throws error anyway, so no need to guard against it here.
      if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)
      throw err
    }
    if (opts.dereference) {
      resolvedDest = path.resolve(process.cwd(), resolvedDest)
    }
    if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
      throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
    }

    // prevent copy if src is a subdir of dest since unlinking
    // dest in this case would result in removing src contents
    // and therefore a broken symlink would be created.
    if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
      throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
    }
    return copyLink(resolvedSrc, dest)
  }
}
|
|
|
|
// Replace the existing entry at `dest` with a symlink to `resolvedSrc`.
function copyLink (resolvedSrc, dest) {
  fs.unlinkSync(dest)
  return fs.symlinkSync(resolvedSrc, dest)
}
|
|
|
|
module.exports = copySync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1135:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = {
|
|
copySync: __nccwpck_require__(3338)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8834:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const mkdirs = __nccwpck_require__(2915).mkdirs
|
|
const pathExists = __nccwpck_require__(3835).pathExists
|
|
const utimesMillis = __nccwpck_require__(2548).utimesMillis
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
// Asynchronously copy `src` to `dest`, then invoke `cb(err)`. `opts` may
// be the callback itself, a filter function, or an options object
// ({ overwrite/clobber, errorOnExist, dereference, preserveTimestamps,
// filter }). Path validity is checked first via the `stat` helpers.
function copy (src, dest, opts, cb) {
  // Normalize the flexible (src, dest[, opts][, cb]) call signatures.
  if (typeof opts === 'function' && !cb) {
    cb = opts
    opts = {}
  } else if (typeof opts === 'function') {
    opts = { filter: opts }
  }

  cb = cb || function () {}
  opts = opts || {}

  opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
  opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber

  // Warn about using preserveTimestamps on 32-bit node
  if (opts.preserveTimestamps && process.arch === 'ia32') {
    console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n
    see https://github.com/jprichardson/node-fs-extra/issues/269`)
  }

  stat.checkPaths(src, dest, 'copy', (err, stats) => {
    if (err) return cb(err)
    const { srcStat, destStat } = stats
    stat.checkParentPaths(src, srcStat, dest, 'copy', err => {
      if (err) return cb(err)
      if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)
      return checkParentDir(destStat, src, dest, opts, cb)
    })
  })
}
|
|
|
|
// Ensure dest's parent directory exists (creating it when needed), then
// start the copy.
function checkParentDir (destStat, src, dest, opts, cb) {
  const destParent = path.dirname(dest)
  pathExists(destParent, (err, dirExists) => {
    if (err) return cb(err)
    if (dirExists) return startCopy(destStat, src, dest, opts, cb)
    mkdirs(destParent, err => {
      if (err) return cb(err)
      return startCopy(destStat, src, dest, opts, cb)
    })
  })
}
|
|
|
|
// Evaluate opts.filter (which may return a boolean or a Promise of one).
// When it passes, continue via `onInclude`; when it fails, finish via
// `cb()`; a rejected filter promise is reported as an error through `cb`.
function handleFilter (onInclude, destStat, src, dest, opts, cb) {
  const verdict = Promise.resolve(opts.filter(src, dest))
  verdict.then(
    include => include ? onInclude(destStat, src, dest, opts, cb) : cb(),
    error => cb(error)
  )
}
|
|
|
|
// Run the (possibly async) filter gate before stat-ing and dispatching.
function startCopy (destStat, src, dest, opts, cb) {
  if (!opts.filter) {
    return getStats(destStat, src, dest, opts, cb)
  }
  return handleFilter(getStats, destStat, src, dest, opts, cb)
}
|
|
|
|
// Stat the source (lstat unless opts.dereference) and dispatch to the
// handler for its file type. Types with no branch here (e.g. FIFOs,
// sockets) fall through without calling `cb`.
function getStats (destStat, src, dest, opts, cb) {
  const stat = opts.dereference ? fs.stat : fs.lstat
  stat(src, (err, srcStat) => {
    if (err) return cb(err)

    if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)
    else if (srcStat.isFile() ||
             srcStat.isCharacterDevice() ||
             srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)
    else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)
  })
}
|
|
|
|
// Copy a regular file; when the destination already exists, defer to the
// overwrite/errorOnExist policy in mayCopyFile.
function onFile (srcStat, destStat, src, dest, opts, cb) {
  if (destStat) {
    return mayCopyFile(srcStat, src, dest, opts, cb)
  }
  return copyFile(srcStat, src, dest, opts, cb)
}
|
|
|
|
// Destination already exists: remove it and copy fresh, report an error,
// or silently skip, depending on opts.overwrite / opts.errorOnExist.
function mayCopyFile (srcStat, src, dest, opts, cb) {
  if (opts.overwrite) {
    fs.unlink(dest, err => {
      if (err) return cb(err)
      return copyFile(srcStat, src, dest, opts, cb)
    })
  } else if (opts.errorOnExist) {
    return cb(new Error(`'${dest}' already exists`))
  } else return cb()
}
|
|
|
|
// Copy the file contents, then optionally the timestamps, and always
// mirror the source's mode onto the destination.
function copyFile (srcStat, src, dest, opts, cb) {
  fs.copyFile(src, dest, err => {
    if (err) return cb(err)
    if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)
    return setDestMode(dest, srcStat.mode, cb)
  })
}
|
|
|
|
// Preserve src's timestamps and mode on dest, first forcing the
// owner-write bit on dest when the source mode lacks it.
function handleTimestampsAndMode (srcMode, src, dest, cb) {
  // Make sure the file is writable before setting the timestamp
  // otherwise open fails with EPERM when invoked with 'r+'
  // (through utimes call)
  if (fileIsNotWritable(srcMode)) {
    return makeFileWritable(dest, srcMode, err => {
      if (err) return cb(err)
      return setDestTimestampsAndMode(srcMode, src, dest, cb)
    })
  }
  return setDestTimestampsAndMode(srcMode, src, dest, cb)
}
|
|
|
|
// A mode with the owner-write bit (0o200) cleared is read-only for its
// owner.
function fileIsNotWritable (srcMode) {
  return !(srcMode & 0o200)
}
|
|
|
|
// chmod `dest` with the owner-write bit forced on.
function makeFileWritable (dest, srcMode, cb) {
  const writable = srcMode | 0o200
  return setDestMode(dest, writable, cb)
}
|
|
|
|
// Apply src's timestamps to dest, then restore the (original) source
// mode — undoing any temporary writable bit added earlier.
function setDestTimestampsAndMode (srcMode, src, dest, cb) {
  setDestTimestamps(src, dest, err => {
    if (err) return cb(err)
    return setDestMode(dest, srcMode, cb)
  })
}
|
|
|
|
// Mirror the given permission bits onto `target`, reporting via `done`.
function setDestMode (target, mode, done) {
  return fs.chmod(target, mode, done)
}
|
|
|
|
// Copy atime/mtime from src to dest, using a fresh stat of src.
function setDestTimestamps (src, dest, cb) {
  // The initial srcStat.atime cannot be trusted
  // because it is modified by the read(2) system call
  // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
  fs.stat(src, (err, updatedSrcStat) => {
    if (err) return cb(err)
    return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)
  })
}
|
|
|
|
// Copy a directory: create dest when missing, refuse to clobber a
// non-directory, then recurse into the entries.
function onDir (srcStat, destStat, src, dest, opts, cb) {
  if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)
  if (destStat && !destStat.isDirectory()) {
    return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))
  }
  return copyDir(src, dest, opts, cb)
}
|
|
|
|
// Create the destination directory, copy the contents, then apply the
// source directory's mode (mode is set after the children are copied).
function mkDirAndCopy (srcMode, src, dest, opts, cb) {
  fs.mkdir(dest, err => {
    if (err) return cb(err)
    copyDir(src, dest, opts, err => {
      if (err) return cb(err)
      return setDestMode(dest, srcMode, cb)
    })
  })
}
|
|
|
|
// List the directory, then copy its entries one at a time through the
// copyDirItems/copyDirItem callback loop.
function copyDir (src, dest, opts, cb) {
  fs.readdir(src, (err, items) => {
    if (err) return cb(err)
    return copyDirItems(items, src, dest, opts, cb)
  })
}
|
|
|
|
// Pop the next entry off the work list and copy it; when the list is
// exhausted, report completion via `cb`.
function copyDirItems (items, src, dest, opts, cb) {
  const next = items.pop()
  if (!next) {
    return cb()
  }
  return copyDirItem(items, next, src, dest, opts, cb)
}
|
|
|
|
// Copy one directory entry (re-validating the src/dest pair first), then
// continue with the remaining work list.
function copyDirItem (items, item, src, dest, opts, cb) {
  const srcItem = path.join(src, item)
  const destItem = path.join(dest, item)
  stat.checkPaths(srcItem, destItem, 'copy', (err, stats) => {
    if (err) return cb(err)
    const { destStat } = stats
    startCopy(destStat, srcItem, destItem, opts, err => {
      if (err) return cb(err)
      return copyDirItems(items, src, dest, opts, cb)
    })
  })
}
|
|
|
|
// Copy a symlink. Relative link targets are resolved against the current
// working directory when opts.dereference is set, and copies that would
// corrupt either tree (self-subdirectory links) are rejected.
function onLink (destStat, src, dest, opts, cb) {
  fs.readlink(src, (err, resolvedSrc) => {
    if (err) return cb(err)
    if (opts.dereference) {
      resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
    }

    if (!destStat) {
      return fs.symlink(resolvedSrc, dest, cb)
    } else {
      fs.readlink(dest, (err, resolvedDest) => {
        if (err) {
          // dest exists and is a regular file or directory,
          // Windows may throw UNKNOWN error. If dest already exists,
          // fs throws error anyway, so no need to guard against it here.
          if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)
          return cb(err)
        }
        if (opts.dereference) {
          resolvedDest = path.resolve(process.cwd(), resolvedDest)
        }
        if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
          return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))
        }

        // do not copy if src is a subdir of dest since unlinking
        // dest in this case would result in removing src contents
        // and therefore a broken symlink would be created.
        if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
          return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))
        }
        return copyLink(resolvedSrc, dest, cb)
      })
    }
  })
}
|
|
|
|
// Replace whatever symlink currently lives at `dest` with a fresh
// link pointing at `resolvedSrc`.
function copyLink (resolvedSrc, dest, cb) {
  fs.unlink(dest, unlinkErr => {
    if (unlinkErr) return cb(unlinkErr)
    return fs.symlink(resolvedSrc, dest, cb)
  })
}
|
|
|
|
module.exports = copy
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1335:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
module.exports = {
|
|
copy: u(__nccwpck_require__(8834))
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6970:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const mkdir = __nccwpck_require__(2915)
|
|
const remove = __nccwpck_require__(7357)
|
|
|
|
// Remove all entries inside `dir` (but not `dir` itself). When the
// directory does not exist it is created instead. Universalified:
// usable with a callback or as a Promise.
const emptyDir = u(function emptyDir (dir, callback) {
  callback = callback || function () {}
  fs.readdir(dir, (err, items) => {
    // readdir failure is treated as "directory missing": create it.
    if (err) return mkdir.mkdirs(dir, callback)

    items = items.map(item => path.join(dir, item))

    const removeNext = () => {
      const item = items.pop()
      if (!item) return callback()
      remove.remove(item, removeErr => {
        if (removeErr) return callback(removeErr)
        removeNext()
      })
    }

    removeNext()
  })
})
|
|
|
|
// Synchronous emptyDir: delete every entry inside `dir`, creating the
// directory when it does not exist.
function emptyDirSync (dir) {
  let entries
  try {
    entries = fs.readdirSync(dir)
  } catch {
    // Directory missing (or unreadable): create it instead of emptying.
    return mkdir.mkdirsSync(dir)
  }

  for (const entry of entries) {
    remove.removeSync(path.join(dir, entry))
  }
}
|
|
|
|
module.exports = {
|
|
emptyDirSync,
|
|
emptydirSync: emptyDirSync,
|
|
emptyDir,
|
|
emptydir: emptyDir
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2164:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const path = __nccwpck_require__(5622)
|
|
const fs = __nccwpck_require__(7758)
|
|
const mkdir = __nccwpck_require__(2915)
|
|
|
|
// Ensure `file` exists as a regular file (touch-like), creating parent
// directories as needed. An existing regular file is left untouched.
function createFile (file, callback) {
  // Writes an empty file once the parent directory is known to exist.
  function makeFile () {
    fs.writeFile(file, '', err => {
      if (err) return callback(err)
      callback()
    })
  }

  fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err
    // Already a regular file: nothing to do.
    if (!err && stats.isFile()) return callback()
    const dir = path.dirname(file)
    fs.stat(dir, (err, stats) => {
      if (err) {
        // if the directory doesn't exist, make it
        if (err.code === 'ENOENT') {
          return mkdir.mkdirs(dir, err => {
            if (err) return callback(err)
            makeFile()
          })
        }
        return callback(err)
      }

      if (stats.isDirectory()) makeFile()
      else {
        // parent is not a directory
        // This is just to cause an internal ENOTDIR error to be thrown
        fs.readdir(dir, err => {
          if (err) return callback(err)
        })
      }
    })
  })
}
|
|
|
|
// Synchronous createFile: ensure `file` exists as a regular file,
// creating parent directories as needed.
function createFileSync (file) {
  let existing
  try {
    existing = fs.statSync(file)
  } catch {}
  // Already a regular file: nothing to do.
  if (existing && existing.isFile()) return

  const dir = path.dirname(file)
  try {
    const parentIsDir = fs.statSync(dir).isDirectory()
    if (!parentIsDir) {
      // parent is not a directory
      // This is just to cause an internal ENOTDIR error to be thrown
      fs.readdirSync(dir)
    }
  } catch (err) {
    // If the stat call above failed because the directory doesn't exist, create it
    if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
    else throw err
  }

  fs.writeFileSync(file, '')
}
|
|
|
|
module.exports = {
|
|
createFile: u(createFile),
|
|
createFileSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 55:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const file = __nccwpck_require__(2164)
|
|
const link = __nccwpck_require__(3797)
|
|
const symlink = __nccwpck_require__(2549)
|
|
|
|
module.exports = {
|
|
// file
|
|
createFile: file.createFile,
|
|
createFileSync: file.createFileSync,
|
|
ensureFile: file.createFile,
|
|
ensureFileSync: file.createFileSync,
|
|
// link
|
|
createLink: link.createLink,
|
|
createLinkSync: link.createLinkSync,
|
|
ensureLink: link.createLink,
|
|
ensureLinkSync: link.createLinkSync,
|
|
// symlink
|
|
createSymlink: symlink.createSymlink,
|
|
createSymlinkSync: symlink.createSymlinkSync,
|
|
ensureSymlink: symlink.createSymlink,
|
|
ensureSymlinkSync: symlink.createSymlinkSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3797:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const path = __nccwpck_require__(5622)
|
|
const fs = __nccwpck_require__(7758)
|
|
const mkdir = __nccwpck_require__(2915)
|
|
const pathExists = __nccwpck_require__(3835).pathExists
|
|
|
|
// Create a hard link at `dstpath` pointing at `srcpath`, creating
// parent directories as needed. No-op when `dstpath` already exists.
function createLink (srcpath, dstpath, callback) {
  function makeLink (srcpath, dstpath) {
    fs.link(srcpath, dstpath, err => {
      if (err) return callback(err)
      callback(null)
    })
  }

  pathExists(dstpath, (err, destinationExists) => {
    if (err) return callback(err)
    if (destinationExists) return callback(null)
    fs.lstat(srcpath, (err) => {
      if (err) {
        // Rebrand the syscall name so the error references this API.
        err.message = err.message.replace('lstat', 'ensureLink')
        return callback(err)
      }

      const dir = path.dirname(dstpath)
      pathExists(dir, (err, dirExists) => {
        if (err) return callback(err)
        if (dirExists) return makeLink(srcpath, dstpath)
        // Parent directory missing: create it, then link.
        mkdir.mkdirs(dir, err => {
          if (err) return callback(err)
          makeLink(srcpath, dstpath)
        })
      })
    })
  })
}
|
|
|
|
// Synchronous createLink: hard-link `srcpath` to `dstpath`, creating
// parent directories as needed. No-op when `dstpath` already exists.
function createLinkSync (srcpath, dstpath) {
  if (fs.existsSync(dstpath)) return undefined

  try {
    fs.lstatSync(srcpath)
  } catch (err) {
    // Rebrand the syscall name so the error references this API.
    err.message = err.message.replace('lstat', 'ensureLink')
    throw err
  }

  const dir = path.dirname(dstpath)
  if (!fs.existsSync(dir)) {
    mkdir.mkdirsSync(dir)
  }

  return fs.linkSync(srcpath, dstpath)
}
|
|
|
|
module.exports = {
|
|
createLink: u(createLink),
|
|
createLinkSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3727:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const path = __nccwpck_require__(5622)
|
|
const fs = __nccwpck_require__(7758)
|
|
const pathExists = __nccwpck_require__(3835).pathExists
|
|
|
|
/**
|
|
* Function that returns two types of paths, one relative to symlink, and one
|
|
* relative to the current working directory. Checks if path is absolute or
|
|
* relative. If the path is relative, this function checks if the path is
|
|
* relative to symlink or relative to current working directory. This is an
|
|
* initiative to find a smarter `srcpath` to supply when building symlinks.
|
|
* This allows you to determine which path to use out of one of three possible
|
|
* types of source paths. The first is an absolute path. This is detected by
|
|
* `path.isAbsolute()`. When an absolute path is provided, it is checked to
|
|
* see if it exists. If it does it's used, if not an error is returned
|
|
* (callback)/ thrown (sync). The other two options for `srcpath` are a
|
|
* relative url. By default Node's `fs.symlink` works by creating a symlink
|
|
* using `dstpath` and expects the `srcpath` to be relative to the newly
|
|
* created symlink. If you provide a `srcpath` that does not exist on the file
|
|
* system it results in a broken symlink. To minimize this, the function
|
|
* checks to see if the 'relative to symlink' source file exists, and if it
|
|
* does it will use it. If it does not, it checks if there's a file that
|
|
* exists that is relative to the current working directory, if does its used.
|
|
* This preserves the expectations of the original fs.symlink spec and adds
|
|
* the ability to pass in `relative to current working direcotry` paths.
|
|
*/
|
|
|
|
// Resolve `srcpath` into the two forms needed to build a symlink:
// `toCwd` (resolvable from the current working directory, used for
// existence/type checks) and `toDst` (the target string actually
// stored in the link). See the long comment above for the rationale.
function symlinkPaths (srcpath, dstpath, callback) {
  if (path.isAbsolute(srcpath)) {
    // Absolute source: it must exist and is used verbatim.
    return fs.lstat(srcpath, (err) => {
      if (err) {
        // Rebrand the syscall name so the error references this API.
        err.message = err.message.replace('lstat', 'ensureSymlink')
        return callback(err)
      }
      return callback(null, {
        toCwd: srcpath,
        toDst: srcpath
      })
    })
  } else {
    // Relative source: prefer resolving relative to the symlink's own
    // directory; fall back to resolving relative to cwd.
    const dstdir = path.dirname(dstpath)
    const relativeToDst = path.join(dstdir, srcpath)
    return pathExists(relativeToDst, (err, exists) => {
      if (err) return callback(err)
      if (exists) {
        return callback(null, {
          toCwd: relativeToDst,
          toDst: srcpath
        })
      } else {
        return fs.lstat(srcpath, (err) => {
          if (err) {
            err.message = err.message.replace('lstat', 'ensureSymlink')
            return callback(err)
          }
          return callback(null, {
            toCwd: srcpath,
            toDst: path.relative(dstdir, srcpath)
          })
        })
      }
    })
  }
}
|
|
|
|
// Synchronous symlinkPaths: returns { toCwd, toDst } for `srcpath`
// (see symlinkPaths above for the resolution rules). Throws when the
// source cannot be found.
function symlinkPathsSync (srcpath, dstpath) {
  if (path.isAbsolute(srcpath)) {
    // Absolute source: must exist, used verbatim for both purposes.
    if (!fs.existsSync(srcpath)) throw new Error('absolute srcpath does not exist')
    return { toCwd: srcpath, toDst: srcpath }
  }

  const dstdir = path.dirname(dstpath)
  const relativeToDst = path.join(dstdir, srcpath)
  if (fs.existsSync(relativeToDst)) {
    // Source resolves relative to the symlink's own directory.
    return { toCwd: relativeToDst, toDst: srcpath }
  }

  // Fall back to resolving relative to the current working directory.
  if (!fs.existsSync(srcpath)) throw new Error('relative srcpath does not exist')
  return { toCwd: srcpath, toDst: path.relative(dstdir, srcpath) }
}
|
|
|
|
module.exports = {
|
|
symlinkPaths,
|
|
symlinkPathsSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8254:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
|
|
// Determine the symlink `type` to pass to fs.symlink. An explicit type
// wins; otherwise it is inferred from lstat of `srcpath`, defaulting to
// 'file' when the source cannot be stat'd. `type` may be omitted.
function symlinkType (srcpath, type, callback) {
  callback = (typeof type === 'function') ? type : callback
  type = (typeof type === 'function') ? false : type
  if (type) return callback(null, type)
  fs.lstat(srcpath, (err, stats) => {
    // Unreadable/missing source: assume a 'file' link.
    if (err) return callback(null, 'file')
    callback(null, (stats && stats.isDirectory()) ? 'dir' : 'file')
  })
}
|
|
|
|
// Synchronous symlinkType: explicit `type` wins; otherwise inferred
// from lstat, defaulting to 'file' when lstat fails.
function symlinkTypeSync (srcpath, type) {
  if (type) return type

  let stats
  try {
    stats = fs.lstatSync(srcpath)
  } catch {
    return 'file'
  }
  return (stats && stats.isDirectory()) ? 'dir' : 'file'
}
|
|
|
|
module.exports = {
|
|
symlinkType,
|
|
symlinkTypeSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2549:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const path = __nccwpck_require__(5622)
|
|
const fs = __nccwpck_require__(7758)
|
|
const _mkdirs = __nccwpck_require__(2915)
|
|
const mkdirs = _mkdirs.mkdirs
|
|
const mkdirsSync = _mkdirs.mkdirsSync
|
|
|
|
const _symlinkPaths = __nccwpck_require__(3727)
|
|
const symlinkPaths = _symlinkPaths.symlinkPaths
|
|
const symlinkPathsSync = _symlinkPaths.symlinkPathsSync
|
|
|
|
const _symlinkType = __nccwpck_require__(8254)
|
|
const symlinkType = _symlinkType.symlinkType
|
|
const symlinkTypeSync = _symlinkType.symlinkTypeSync
|
|
|
|
const pathExists = __nccwpck_require__(3835).pathExists
|
|
|
|
// Create a symlink at `dstpath` pointing at `srcpath`, creating parent
// directories as needed. No-op when `dstpath` already exists. `type` is
// optional and is passed through to fs.symlink; when omitted it is
// inferred via symlinkType.
function createSymlink (srcpath, dstpath, type, callback) {
  // `type` is optional — shift arguments when it is the callback.
  callback = (typeof type === 'function') ? type : callback
  type = (typeof type === 'function') ? false : type

  pathExists(dstpath, (err, destinationExists) => {
    if (err) return callback(err)
    if (destinationExists) return callback(null)
    symlinkPaths(srcpath, dstpath, (err, relative) => {
      if (err) return callback(err)
      // Store the link-relative form in the symlink itself.
      srcpath = relative.toDst
      symlinkType(relative.toCwd, type, (err, type) => {
        if (err) return callback(err)
        const dir = path.dirname(dstpath)
        pathExists(dir, (err, dirExists) => {
          if (err) return callback(err)
          if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)
          // Parent directory missing: create it, then link.
          mkdirs(dir, err => {
            if (err) return callback(err)
            fs.symlink(srcpath, dstpath, type, callback)
          })
        })
      })
    })
  })
}
|
|
|
|
// Synchronous createSymlink: link `dstpath` -> `srcpath`, creating
// parent directories as needed. No-op when `dstpath` already exists.
function createSymlinkSync (srcpath, dstpath, type) {
  if (fs.existsSync(dstpath)) return undefined

  const relative = symlinkPathsSync(srcpath, dstpath)
  srcpath = relative.toDst
  type = symlinkTypeSync(relative.toCwd, type)

  const dir = path.dirname(dstpath)
  if (!fs.existsSync(dir)) {
    mkdirsSync(dir)
  }
  return fs.symlinkSync(srcpath, dstpath, type)
}
|
|
|
|
module.exports = {
|
|
createSymlink: u(createSymlink),
|
|
createSymlinkSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1176:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
// This is adapted from https://github.com/normalize/mz
|
|
// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const fs = __nccwpck_require__(7758)
|
|
|
|
// Async fs methods to universalify (callable with a callback OR as a
// Promise), filtered down to those present on this Node version.
const api = [
  'access',
  'appendFile',
  'chmod',
  'chown',
  'close',
  'copyFile',
  'fchmod',
  'fchown',
  'fdatasync',
  'fstat',
  'fsync',
  'ftruncate',
  'futimes',
  'lchmod',
  'lchown',
  'link',
  'lstat',
  'mkdir',
  'mkdtemp',
  'open',
  'opendir',
  'readdir',
  'readFile',
  'readlink',
  'realpath',
  'rename',
  'rm',
  'rmdir',
  'stat',
  'symlink',
  'truncate',
  'unlink',
  'utimes',
  'writeFile'
].filter(key => {
  // Some commands are not available on some systems. Ex:
  // fs.opendir was added in Node.js v12.12.0
  // fs.rm was added in Node.js v14.14.0
  // fs.lchown is not available on at least some Linux
  return typeof fs[key] === 'function'
})

// Export all keys:
Object.keys(fs).forEach(key => {
  if (key === 'promises') {
    // fs.promises is a getter property that triggers ExperimentalWarning
    // Don't re-export it here, the getter is defined in "lib/index.js"
    return
  }
  exports[key] = fs[key]
})

// Universalify async methods:
api.forEach(method => {
  exports[method] = u(fs[method])
})
|
|
|
|
// We differ from mz/fs in that we still ship the old, broken, fs.exists()
|
|
// since we are a drop-in replacement for the native module
|
|
exports.exists = function (filename, callback) {
|
|
if (typeof callback === 'function') {
|
|
return fs.exists(filename, callback)
|
|
}
|
|
return new Promise(resolve => {
|
|
return fs.exists(filename, resolve)
|
|
})
|
|
}
|
|
|
|
// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args
|
|
|
|
exports.read = function (fd, buffer, offset, length, position, callback) {
|
|
if (typeof callback === 'function') {
|
|
return fs.read(fd, buffer, offset, length, position, callback)
|
|
}
|
|
return new Promise((resolve, reject) => {
|
|
fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {
|
|
if (err) return reject(err)
|
|
resolve({ bytesRead, buffer })
|
|
})
|
|
})
|
|
}
|
|
|
|
// Function signature can be
|
|
// fs.write(fd, buffer[, offset[, length[, position]]], callback)
|
|
// OR
|
|
// fs.write(fd, string[, position[, encoding]], callback)
|
|
// We need to handle both cases, so we use ...args
|
|
exports.write = function (fd, buffer, ...args) {
|
|
if (typeof args[args.length - 1] === 'function') {
|
|
return fs.write(fd, buffer, ...args)
|
|
}
|
|
|
|
return new Promise((resolve, reject) => {
|
|
fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {
|
|
if (err) return reject(err)
|
|
resolve({ bytesWritten, buffer })
|
|
})
|
|
})
|
|
}
|
|
|
|
// fs.writev only available in Node v12.9.0+
if (typeof fs.writev === 'function') {
  // Function signature is
  // fs.writev(fd, buffers[, position], callback)
  // We need to handle the optional arg, so we use ...args
  exports.writev = function (fd, buffers, ...args) {
    if (typeof args[args.length - 1] === 'function') {
      // Callback style: pass straight through.
      return fs.writev(fd, buffers, ...args)
    }

    // Promise style: resolve with both callback values.
    return new Promise((resolve, reject) => {
      fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
        if (err) return reject(err)
        resolve({ bytesWritten, buffers })
      })
    })
  }
}

// fs.realpath.native only available in Node v9.2+
if (typeof fs.realpath.native === 'function') {
  exports.realpath.native = u(fs.realpath.native)
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5630:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = {
|
|
// Export promiseified graceful-fs:
|
|
...__nccwpck_require__(1176),
|
|
// Export extra methods:
|
|
...__nccwpck_require__(1135),
|
|
...__nccwpck_require__(1335),
|
|
...__nccwpck_require__(6970),
|
|
...__nccwpck_require__(55),
|
|
...__nccwpck_require__(213),
|
|
...__nccwpck_require__(2915),
|
|
...__nccwpck_require__(9665),
|
|
...__nccwpck_require__(1497),
|
|
...__nccwpck_require__(6570),
|
|
...__nccwpck_require__(3835),
|
|
...__nccwpck_require__(7357)
|
|
}
|
|
|
|
// Export fs.promises as a getter property so that we don't trigger
|
|
// ExperimentalWarning before fs.promises is actually accessed.
|
|
const fs = __nccwpck_require__(5747)
|
|
if (Object.getOwnPropertyDescriptor(fs, 'promises')) {
|
|
Object.defineProperty(module.exports, "promises", ({
|
|
get () { return fs.promises }
|
|
}))
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 213:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromPromise */ .p
|
|
const jsonFile = __nccwpck_require__(8970)
|
|
|
|
jsonFile.outputJson = u(__nccwpck_require__(531))
|
|
jsonFile.outputJsonSync = __nccwpck_require__(9421)
|
|
// aliases
|
|
jsonFile.outputJSON = jsonFile.outputJson
|
|
jsonFile.outputJSONSync = jsonFile.outputJsonSync
|
|
jsonFile.writeJSON = jsonFile.writeJson
|
|
jsonFile.writeJSONSync = jsonFile.writeJsonSync
|
|
jsonFile.readJSON = jsonFile.readJson
|
|
jsonFile.readJSONSync = jsonFile.readJsonSync
|
|
|
|
module.exports = jsonFile
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8970:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const jsonFile = __nccwpck_require__(6160)
|
|
|
|
module.exports = {
|
|
// jsonfile exports
|
|
readJson: jsonFile.readFile,
|
|
readJsonSync: jsonFile.readFileSync,
|
|
writeJson: jsonFile.writeFile,
|
|
writeJsonSync: jsonFile.writeFileSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9421:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const { stringify } = __nccwpck_require__(5902)
|
|
const { outputFileSync } = __nccwpck_require__(6570)
|
|
|
|
// Serialise `data` as JSON and write it to `file`, creating parent
// directories as needed. `options` is forwarded to both stringify and
// outputFileSync.
function outputJsonSync (file, data, options) {
  outputFileSync(file, stringify(data, options), options)
}
|
|
|
|
module.exports = outputJsonSync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 531:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const { stringify } = __nccwpck_require__(5902)
|
|
const { outputFile } = __nccwpck_require__(6570)
|
|
|
|
// Serialise `data` as JSON and write it to `file`, creating parent
// directories as needed. `options` is forwarded to both stringify and
// outputFile.
async function outputJson (file, data, options = {}) {
  await outputFile(file, stringify(data, options), options)
}
|
|
|
|
module.exports = outputJson
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2915:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
const u = __nccwpck_require__(746)/* .fromPromise */ .p
|
|
const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751)
|
|
const makeDir = u(_makeDir)
|
|
|
|
module.exports = {
|
|
mkdirs: makeDir,
|
|
mkdirsSync: makeDirSync,
|
|
// alias
|
|
mkdirp: makeDir,
|
|
mkdirpSync: makeDirSync,
|
|
ensureDir: makeDir,
|
|
ensureDirSync: makeDirSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2751:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
// Adapted from https://github.com/sindresorhus/make-dir
|
|
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
|
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
|
|
const fs = __nccwpck_require__(1176)
|
|
const path = __nccwpck_require__(5622)
|
|
const atLeastNode = __nccwpck_require__(5995)
|
|
|
|
const useNativeRecursiveOption = atLeastNode('10.12.0')
|
|
|
|
// https://github.com/nodejs/node/issues/8987
|
|
// https://github.com/libuv/libuv/pull/1088
|
|
// Reject path segments containing characters Windows forbids; a no-op
// on other platforms. (The drive root, e.g. "C:\", is stripped first so
// its colon does not trip the check.)
const checkPath = pth => {
  if (process.platform !== 'win32') return

  const withoutRoot = pth.replace(path.parse(pth).root, '')
  if (/[<>:"|?*]/.test(withoutRoot)) {
    const error = new Error(`Path contains invalid characters: ${pth}`)
    error.code = 'EINVAL'
    throw error
  }
}
|
|
|
|
// Normalise mkdir options: a bare number is shorthand for { mode },
// and a default mode of 0o777 is applied.
const processOptions = options => {
  if (typeof options === 'number') options = { mode: options }
  return { mode: 0o777, ...options }
}
|
|
|
|
// Build the EPERM error that `fs.mkdir` with the native `recursive`
// option raises when run on an invalid drive under Windows, so the
// manual fallback behaves identically.
const permissionError = pth => {
  const error = new Error(`operation not permitted, mkdir '${pth}'`)
  Object.assign(error, {
    code: 'EPERM',
    errno: -4048,
    path: pth,
    syscall: 'mkdir'
  })
  return error
}
|
|
|
|
// Recursively create a directory (mkdir -p semantics); returns a
// Promise. Uses the native `recursive` option when available, otherwise
// falls back to manual parent-first recursion.
module.exports.makeDir = async (input, options) => {
  checkPath(input)
  options = processOptions(options)

  if (useNativeRecursiveOption) {
    const pth = path.resolve(input)

    return fs.mkdir(pth, {
      mode: options.mode,
      recursive: true
    })
  }

  // Manual fallback: recurse upward until an existing ancestor is found.
  const make = async pth => {
    try {
      await fs.mkdir(pth, options.mode)
    } catch (error) {
      if (error.code === 'EPERM') {
        throw error
      }

      if (error.code === 'ENOENT') {
        // Reached the filesystem root without success: replicate the
        // EPERM the native recursive mkdir raises in this situation.
        if (path.dirname(pth) === pth) {
          throw permissionError(pth)
        }

        if (error.message.includes('null bytes')) {
          throw error
        }

        // Create the parent first, then retry this level.
        await make(path.dirname(pth))
        return make(pth)
      }

      // Any other error: accept it only if the path is already a
      // directory; otherwise surface the original mkdir error.
      try {
        const stats = await fs.stat(pth)
        if (!stats.isDirectory()) {
          // This error is never exposed to the user
          // it is caught below, and the original error is thrown
          throw new Error('The path is not a directory')
        }
      } catch {
        // Deliberately rethrow the original mkdir `error`, not the
        // probe's failure.
        throw error
      }
    }
  }

  return make(path.resolve(input))
}
|
|
|
|
// Synchronous makeDir: recursively create a directory (mkdir -p
// semantics). Uses the native `recursive` option when available,
// otherwise falls back to manual parent-first recursion.
module.exports.makeDirSync = (input, options) => {
  checkPath(input)
  options = processOptions(options)

  if (useNativeRecursiveOption) {
    const pth = path.resolve(input)

    return fs.mkdirSync(pth, {
      mode: options.mode,
      recursive: true
    })
  }

  // Manual fallback: recurse upward until an existing ancestor is found.
  const make = pth => {
    try {
      fs.mkdirSync(pth, options.mode)
    } catch (error) {
      if (error.code === 'EPERM') {
        throw error
      }

      if (error.code === 'ENOENT') {
        // Reached the filesystem root without success: replicate the
        // EPERM the native recursive mkdir raises in this situation.
        if (path.dirname(pth) === pth) {
          throw permissionError(pth)
        }

        if (error.message.includes('null bytes')) {
          throw error
        }

        // Create the parent first, then retry this level.
        make(path.dirname(pth))
        return make(pth)
      }

      // Any other error: accept it only if the path is already a
      // directory; otherwise surface the original mkdir error.
      try {
        if (!fs.statSync(pth).isDirectory()) {
          // This error is never exposed to the user
          // it is caught below, and the original error is thrown
          throw new Error('The path is not a directory')
        }
      } catch {
        // Deliberately rethrow the original mkdir `error`, not the
        // probe's failure.
        throw error
      }
    }
  }

  return make(path.resolve(input))
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9665:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = {
|
|
moveSync: __nccwpck_require__(6445)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6445:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const copySync = __nccwpck_require__(1135).copySync
|
|
const removeSync = __nccwpck_require__(7357).removeSync
|
|
const mkdirpSync = __nccwpck_require__(2915).mkdirpSync
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
// Synchronously move `src` to `dest`, creating dest's parent directory
// first. `opts.overwrite` (alias `opts.clobber`) allows replacing an
// existing destination.
function moveSync (src, dest, opts) {
  const options = opts || {}
  const overwrite = options.overwrite || options.clobber || false

  // Validate the src/dest pair before touching the filesystem.
  const { srcStat } = stat.checkPathsSync(src, dest, 'move')
  stat.checkParentPathsSync(src, srcStat, dest, 'move')

  mkdirpSync(path.dirname(dest))
  return doRename(src, dest, overwrite)
}
|
|
|
|
// Perform the rename step: in overwrite mode clear the destination
// first; otherwise refuse to clobber an existing destination.
function doRename (src, dest, overwrite) {
  if (!overwrite && fs.existsSync(dest)) throw new Error('dest already exists.')
  if (overwrite) removeSync(dest)
  return rename(src, dest, overwrite)
}
|
|
|
|
// Attempt a fast rename; on EXDEV (cross-device) fall back to
// copy-then-delete. Any other error is rethrown.
function rename (src, dest, overwrite) {
  try {
    fs.renameSync(src, dest)
  } catch (err) {
    if (err.code === 'EXDEV') return moveAcrossDevice(src, dest, overwrite)
    throw err
  }
}
|
|
|
|
// Emulate a cross-device rename: copy everything to dest, then delete
// the original tree.
function moveAcrossDevice (src, dest, overwrite) {
  copySync(src, dest, { overwrite, errorOnExist: true })
  return removeSync(src)
}
|
|
|
|
module.exports = moveSync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1497:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
module.exports = {
|
|
move: u(__nccwpck_require__(2231))
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2231:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const copy = __nccwpck_require__(1335).copy
|
|
const remove = __nccwpck_require__(7357).remove
|
|
const mkdirp = __nccwpck_require__(2915).mkdirp
|
|
const pathExists = __nccwpck_require__(3835).pathExists
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
// Move `src` to `dest`, creating dest's parent directory first.
// `opts.overwrite` (alias `opts.clobber`) allows replacing an existing
// destination; otherwise an existing dest is an error. `opts` may be
// omitted (callback in its place).
function move (src, dest, opts, cb) {
  if (typeof opts === 'function') {
    cb = opts
    opts = {}
  }

  const overwrite = opts.overwrite || opts.clobber || false

  // Validate the src/dest pair before touching the filesystem.
  stat.checkPaths(src, dest, 'move', (err, stats) => {
    if (err) return cb(err)
    const { srcStat } = stats
    stat.checkParentPaths(src, srcStat, dest, 'move', err => {
      if (err) return cb(err)
      mkdirp(path.dirname(dest), err => {
        if (err) return cb(err)
        return doRename(src, dest, overwrite, cb)
      })
    })
  })
}
|
|
|
|
// Perform the rename step: in overwrite mode clear the destination
// first; otherwise refuse to clobber an existing destination.
function doRename (src, dest, overwrite, cb) {
  if (overwrite) {
    return remove(dest, removeErr => {
      if (removeErr) return cb(removeErr)
      return rename(src, dest, overwrite, cb)
    })
  }

  pathExists(dest, (err, destExists) => {
    if (err) return cb(err)
    if (destExists) return cb(new Error('dest already exists.'))
    return rename(src, dest, overwrite, cb)
  })
}
|
|
|
|
// Attempt a fast rename; on EXDEV (cross-device) fall back to
// copy-then-delete. Any other error is forwarded to the callback.
function rename (src, dest, overwrite, cb) {
  fs.rename(src, dest, err => {
    if (!err) return cb()
    if (err.code === 'EXDEV') return moveAcrossDevice(src, dest, overwrite, cb)
    return cb(err)
  })
}
|
|
|
|
// Emulate a cross-device rename: copy everything to dest, then delete
// the original tree.
function moveAcrossDevice (src, dest, overwrite, cb) {
  copy(src, dest, { overwrite, errorOnExist: true }, copyErr => {
    if (copyErr) return cb(copyErr)
    return remove(src, cb)
  })
}
|
|
|
|
module.exports = move
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6570:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const mkdir = __nccwpck_require__(2915)
|
|
const pathExists = __nccwpck_require__(3835).pathExists
|
|
|
|
// Write `data` to `file`, creating the parent directory first when it
// does not exist. `encoding` is optional (defaults to 'utf8').
function outputFile (file, data, encoding, callback) {
  if (typeof encoding === 'function') {
    callback = encoding
    encoding = 'utf8'
  }

  const dir = path.dirname(file)
  pathExists(dir, (err, dirExists) => {
    if (err) return callback(err)
    if (dirExists) return fs.writeFile(file, data, encoding, callback)

    mkdir.mkdirs(dir, mkdirErr => {
      if (mkdirErr) return callback(mkdirErr)
      fs.writeFile(file, data, encoding, callback)
    })
  })
}
|
|
|
|
// Synchronous outputFile: write to `file`, creating the parent
// directory first when needed. Extra args go straight to writeFileSync.
function outputFileSync (file, ...args) {
  const dir = path.dirname(file)
  if (!fs.existsSync(dir)) {
    mkdir.mkdirsSync(dir)
    fs.writeFileSync(file, ...args)
    return
  }
  return fs.writeFileSync(file, ...args)
}
|
|
|
|
module.exports = {
|
|
outputFile: u(outputFile),
|
|
outputFileSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3835:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
const u = __nccwpck_require__(746)/* .fromPromise */ .p
|
|
const fs = __nccwpck_require__(1176)
|
|
|
|
// Promise-based existence check: resolves true when `path` is
// accessible, false otherwise (never rejects).
function pathExists (path) {
  return fs.access(path).then(
    () => true,
    () => false
  )
}
|
|
|
|
module.exports = {
|
|
pathExists: u(pathExists),
|
|
pathExistsSync: fs.existsSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7357:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = __nccwpck_require__(746)/* .fromCallback */ .E
|
|
const rimraf = __nccwpck_require__(8761)
|
|
|
|
module.exports = {
|
|
remove: u(rimraf),
|
|
removeSync: rimraf.sync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8761:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(5622)
|
|
const assert = __nccwpck_require__(2357)
|
|
|
|
const isWindows = (process.platform === 'win32')
|
|
|
|
// Fill in the fs methods (and their *Sync variants) that rimraf uses,
// letting callers inject replacements, and default maxBusyTries to 3.
// Mutates `options` in place.
function defaults (options) {
  const methods = [
    'unlink',
    'chmod',
    'stat',
    'lstat',
    'rmdir',
    'readdir'
  ]
  for (const name of methods) {
    options[name] = options[name] || fs[name]
    const syncName = name + 'Sync'
    options[syncName] = options[syncName] || fs[syncName]
  }

  options.maxBusyTries = options.maxBusyTries || 3
}
|
|
|
|
// Recursively remove `p` (rm -rf semantics). `options` may be omitted.
// Retries up to options.maxBusyTries times with linear backoff on
// EBUSY/ENOTEMPTY/EPERM; a missing path is treated as success.
function rimraf (p, options, cb) {
  let busyTries = 0

  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  assert(p, 'rimraf: missing path')
  assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
  assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')
  assert(options, 'rimraf: invalid options argument provided')
  assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')

  defaults(options)

  rimraf_(p, options, function CB (er) {
    if (er) {
      if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&
          busyTries < options.maxBusyTries) {
        busyTries++
        // Linear backoff: 100ms, 200ms, 300ms, ...
        const time = busyTries * 100
        // try again, with the same exact callback as this one.
        return setTimeout(() => rimraf_(p, options, CB), time)
      }

      // already gone
      if (er.code === 'ENOENT') er = null
    }

    cb(er)
  })
}
|
|
|
|
// Two possible strategies.
|
|
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
|
|
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
|
|
//
|
|
// Both result in an extra syscall when you guess wrong. However, there
|
|
// are likely far more normal files in the world than directories. This
|
|
// is based on the assumption that a the average number of files per
|
|
// directory is >= 1.
|
|
//
|
|
// If anyone ever complains about this, then I guess the strategy could
|
|
// be made configurable somehow. But until then, YAGNI.
|
|
function rimraf_ (p, options, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
// sunos lets the root user unlink directories, which is... weird.
|
|
// so we have to lstat here and make sure it's not a dir.
|
|
options.lstat(p, (er, st) => {
|
|
if (er && er.code === 'ENOENT') {
|
|
return cb(null)
|
|
}
|
|
|
|
// Windows can EPERM on stat. Life is suffering.
|
|
if (er && er.code === 'EPERM' && isWindows) {
|
|
return fixWinEPERM(p, options, er, cb)
|
|
}
|
|
|
|
if (st && st.isDirectory()) {
|
|
return rmdir(p, options, er, cb)
|
|
}
|
|
|
|
options.unlink(p, er => {
|
|
if (er) {
|
|
if (er.code === 'ENOENT') {
|
|
return cb(null)
|
|
}
|
|
if (er.code === 'EPERM') {
|
|
return (isWindows)
|
|
? fixWinEPERM(p, options, er, cb)
|
|
: rmdir(p, options, er, cb)
|
|
}
|
|
if (er.code === 'EISDIR') {
|
|
return rmdir(p, options, er, cb)
|
|
}
|
|
}
|
|
return cb(er)
|
|
})
|
|
})
|
|
}
|
|
|
|
function fixWinEPERM (p, options, er, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
options.chmod(p, 0o666, er2 => {
|
|
if (er2) {
|
|
cb(er2.code === 'ENOENT' ? null : er)
|
|
} else {
|
|
options.stat(p, (er3, stats) => {
|
|
if (er3) {
|
|
cb(er3.code === 'ENOENT' ? null : er)
|
|
} else if (stats.isDirectory()) {
|
|
rmdir(p, options, er, cb)
|
|
} else {
|
|
options.unlink(p, cb)
|
|
}
|
|
})
|
|
}
|
|
})
|
|
}
|
|
|
|
function fixWinEPERMSync (p, options, er) {
|
|
let stats
|
|
|
|
assert(p)
|
|
assert(options)
|
|
|
|
try {
|
|
options.chmodSync(p, 0o666)
|
|
} catch (er2) {
|
|
if (er2.code === 'ENOENT') {
|
|
return
|
|
} else {
|
|
throw er
|
|
}
|
|
}
|
|
|
|
try {
|
|
stats = options.statSync(p)
|
|
} catch (er3) {
|
|
if (er3.code === 'ENOENT') {
|
|
return
|
|
} else {
|
|
throw er
|
|
}
|
|
}
|
|
|
|
if (stats.isDirectory()) {
|
|
rmdirSync(p, options, er)
|
|
} else {
|
|
options.unlinkSync(p)
|
|
}
|
|
}
|
|
|
|
function rmdir (p, options, originalEr, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
|
|
// if we guessed wrong, and it's not a directory, then
|
|
// raise the original error.
|
|
options.rmdir(p, er => {
|
|
if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {
|
|
rmkids(p, options, cb)
|
|
} else if (er && er.code === 'ENOTDIR') {
|
|
cb(originalEr)
|
|
} else {
|
|
cb(er)
|
|
}
|
|
})
|
|
}
|
|
|
|
function rmkids (p, options, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
options.readdir(p, (er, files) => {
|
|
if (er) return cb(er)
|
|
|
|
let n = files.length
|
|
let errState
|
|
|
|
if (n === 0) return options.rmdir(p, cb)
|
|
|
|
files.forEach(f => {
|
|
rimraf(path.join(p, f), options, er => {
|
|
if (errState) {
|
|
return
|
|
}
|
|
if (er) return cb(errState = er)
|
|
if (--n === 0) {
|
|
options.rmdir(p, cb)
|
|
}
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
// this looks simpler, and is strictly *faster*, but will
|
|
// tie up the JavaScript thread and fail on excessively
|
|
// deep directory trees.
|
|
function rimrafSync (p, options) {
|
|
let st
|
|
|
|
options = options || {}
|
|
defaults(options)
|
|
|
|
assert(p, 'rimraf: missing path')
|
|
assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
|
|
assert(options, 'rimraf: missing options')
|
|
assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')
|
|
|
|
try {
|
|
st = options.lstatSync(p)
|
|
} catch (er) {
|
|
if (er.code === 'ENOENT') {
|
|
return
|
|
}
|
|
|
|
// Windows can EPERM on stat. Life is suffering.
|
|
if (er.code === 'EPERM' && isWindows) {
|
|
fixWinEPERMSync(p, options, er)
|
|
}
|
|
}
|
|
|
|
try {
|
|
// sunos lets the root user unlink directories, which is... weird.
|
|
if (st && st.isDirectory()) {
|
|
rmdirSync(p, options, null)
|
|
} else {
|
|
options.unlinkSync(p)
|
|
}
|
|
} catch (er) {
|
|
if (er.code === 'ENOENT') {
|
|
return
|
|
} else if (er.code === 'EPERM') {
|
|
return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
|
|
} else if (er.code !== 'EISDIR') {
|
|
throw er
|
|
}
|
|
rmdirSync(p, options, er)
|
|
}
|
|
}
|
|
|
|
function rmdirSync (p, options, originalEr) {
|
|
assert(p)
|
|
assert(options)
|
|
|
|
try {
|
|
options.rmdirSync(p)
|
|
} catch (er) {
|
|
if (er.code === 'ENOTDIR') {
|
|
throw originalEr
|
|
} else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {
|
|
rmkidsSync(p, options)
|
|
} else if (er.code !== 'ENOENT') {
|
|
throw er
|
|
}
|
|
}
|
|
}
|
|
|
|
function rmkidsSync (p, options) {
|
|
assert(p)
|
|
assert(options)
|
|
options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
|
|
|
|
if (isWindows) {
|
|
// We only end up here once we got ENOTEMPTY at least once, and
|
|
// at this point, we are guaranteed to have removed all the kids.
|
|
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
|
|
// try really hard to delete stuff on windows, because it has a
|
|
// PROFOUNDLY annoying habit of not closing handles promptly when
|
|
// files are deleted, resulting in spurious ENOTEMPTY errors.
|
|
const startTime = Date.now()
|
|
do {
|
|
try {
|
|
const ret = options.rmdirSync(p, options)
|
|
return ret
|
|
} catch {}
|
|
} while (Date.now() - startTime < 500) // give up after 500ms
|
|
} else {
|
|
const ret = options.rmdirSync(p, options)
|
|
return ret
|
|
}
|
|
}
|
|
|
|
module.exports = rimraf
|
|
rimraf.sync = rimrafSync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3901:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
// fs-extra stat helpers: compare src/dest stats so copy/move can refuse
// to copy a path onto itself or into its own subtree.
const fs = __nccwpck_require__(1176)
const path = __nccwpck_require__(5622)
const util = __nccwpck_require__(1669)
const atLeastNode = __nccwpck_require__(5995)

// Use bigint stats when available (Node >= 10.5) so dev/ino comparisons
// are exact; see areIdentical() below for the non-bigint fallback.
const nodeSupportsBigInt = atLeastNode('10.5.0')
const stat = (file) => nodeSupportsBigInt ? fs.stat(file, { bigint: true }) : fs.stat(file)
const statSync = (file) => nodeSupportsBigInt ? fs.statSync(file, { bigint: true }) : fs.statSync(file)

// Stat src and dest in parallel; a missing dest yields destStat: null
// instead of rejecting.
function getStats (src, dest) {
  return Promise.all([
    stat(src),
    stat(dest).catch(err => {
      if (err.code === 'ENOENT') return null
      throw err
    })
  ]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
}

// Sync twin of getStats().
function getStatsSync (src, dest) {
  let destStat
  const srcStat = statSync(src)
  try {
    destStat = statSync(dest)
  } catch (err) {
    if (err.code === 'ENOENT') return { srcStat, destStat: null }
    throw err
  }
  return { srcStat, destStat }
}

// Validate a src -> dest operation: errors when both are the same
// on-disk file, or when src is a directory and dest is inside it;
// otherwise yields { srcStat, destStat } to the callback.
function checkPaths (src, dest, funcName, cb) {
  util.callbackify(getStats)(src, dest, (err, stats) => {
    if (err) return cb(err)
    const { srcStat, destStat } = stats
    if (destStat && areIdentical(srcStat, destStat)) {
      return cb(new Error('Source and destination must not be the same.'))
    }
    if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
      return cb(new Error(errMsg(src, dest, funcName)))
    }
    return cb(null, { srcStat, destStat })
  })
}

// Sync twin of checkPaths(); throws instead of calling back.
function checkPathsSync (src, dest, funcName) {
  const { srcStat, destStat } = getStatsSync(src, dest)
  if (destStat && areIdentical(srcStat, destStat)) {
    throw new Error('Source and destination must not be the same.')
  }
  if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
    throw new Error(errMsg(src, dest, funcName))
  }
  return { srcStat, destStat }
}

// recursively check if dest parent is a subdirectory of src.
// It works for all file types including symlinks since it
// checks the src and dest inodes. It starts from the deepest
// parent and stops once it reaches the src parent or the root path.
function checkParentPaths (src, srcStat, dest, funcName, cb) {
  const srcParent = path.resolve(path.dirname(src))
  const destParent = path.resolve(path.dirname(dest))
  if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()
  const callback = (err, destStat) => {
    if (err) {
      // a missing ancestor cannot be inside src, so stop successfully
      if (err.code === 'ENOENT') return cb()
      return cb(err)
    }
    if (areIdentical(srcStat, destStat)) {
      return cb(new Error(errMsg(src, dest, funcName)))
    }
    // walk one level up and re-check
    return checkParentPaths(src, srcStat, destParent, funcName, cb)
  }
  if (nodeSupportsBigInt) fs.stat(destParent, { bigint: true }, callback)
  else fs.stat(destParent, callback)
}

// Sync twin of checkParentPaths().
function checkParentPathsSync (src, srcStat, dest, funcName) {
  const srcParent = path.resolve(path.dirname(src))
  const destParent = path.resolve(path.dirname(dest))
  if (destParent === srcParent || destParent === path.parse(destParent).root) return
  let destStat
  try {
    destStat = statSync(destParent)
  } catch (err) {
    if (err.code === 'ENOENT') return
    throw err
  }
  if (areIdentical(srcStat, destStat)) {
    throw new Error(errMsg(src, dest, funcName))
  }
  return checkParentPathsSync(src, srcStat, destParent, funcName)
}
|
|
|
|
// True when srcStat and destStat refer to the same on-disk file.
// A dev+ino match is definitive with bigint stats or small inode
// numbers; otherwise distinct inos >= Number.MAX_SAFE_INTEGER can
// collide as doubles, so fall back to comparing size/mode/times.
function areIdentical (srcStat, destStat) {
  if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) {
    if (nodeSupportsBigInt || destStat.ino < Number.MAX_SAFE_INTEGER) {
      // definitive answer
      return true
    }
    // Use additional heuristics if we can't use 'bigint'.
    // Different 'ino' could be represented the same if they are >= Number.MAX_SAFE_INTEGER
    // See issue 657
    if (destStat.size === srcStat.size &&
        destStat.mode === srcStat.mode &&
        destStat.nlink === srcStat.nlink &&
        destStat.atimeMs === srcStat.atimeMs &&
        destStat.mtimeMs === srcStat.mtimeMs &&
        destStat.ctimeMs === srcStat.ctimeMs &&
        destStat.birthtimeMs === srcStat.birthtimeMs) {
      // heuristic answer
      return true
    }
  }
  return false
}
|
|
|
|
// return true if dest is a subdir of src, otherwise false.
// It only checks the path strings, not inodes; both paths are resolved
// to absolute form and compared component by component.
function isSrcSubdir (src, dest) {
  const srcParts = path.resolve(src).split(path.sep).filter(part => part)
  const destParts = path.resolve(dest).split(path.sep).filter(part => part)
  for (let i = 0; i < srcParts.length; i++) {
    if (destParts[i] !== srcParts[i]) {
      return false
    }
  }
  return true
}
|
|
|
|
// Format the refusal message used when copying/moving a directory into
// its own subtree (funcName is e.g. 'copy' or 'move').
function errMsg (src, dest, funcName) {
  const detail = `'${src}' to a subdirectory of itself, '${dest}'.`
  return `Cannot ${funcName} ${detail}`
}
|
|
|
|
// Shared helpers consumed by fs-extra's copy/move implementations.
module.exports = {
  checkPaths,
  checkPathsSync,
  checkParentPaths,
  checkParentPathsSync,
  isSrcSubdir
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2548:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
|
|
function utimesMillis (path, atime, mtime, callback) {
|
|
// if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
|
|
fs.open(path, 'r+', (err, fd) => {
|
|
if (err) return callback(err)
|
|
fs.futimes(fd, atime, mtime, futimesErr => {
|
|
fs.close(fd, closeErr => {
|
|
if (callback) callback(futimesErr || closeErr)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
function utimesMillisSync (path, atime, mtime) {
|
|
const fd = fs.openSync(path, 'r+')
|
|
fs.futimesSync(fd, atime, mtime)
|
|
return fs.closeSync(fd)
|
|
}
|
|
|
|
module.exports = {
|
|
utimesMillis,
|
|
utimesMillisSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 746:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
// universalify fromCallback (`.E`): wrap a callback-style fn so it can
// also be called WITHOUT a trailing callback, in which case it returns a
// Promise. The wrapper keeps the original function's `name`.
exports.E = function (fn) {
  return Object.defineProperty(function (...args) {
    if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
    else {
      return new Promise((resolve, reject) => {
        fn.call(
          this,
          ...args,
          // node-style (err, res) adapter; `!= null` also rejects on
          // non-null falsy-ish errors like 0 or '' being excluded only
          // when err is null/undefined
          (err, res) => (err != null) ? reject(err) : resolve(res)
        )
      })
    }
  }, 'name', { value: fn.name })
}

// universalify fromPromise (`.p`): wrap a promise-returning fn so it can
// also be called WITH a trailing node-style callback.
exports.p = function (fn) {
  return Object.defineProperty(function (...args) {
    const cb = args[args.length - 1]
    if (typeof cb !== 'function') return fn.apply(this, args)
    else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)
  }, 'name', { value: fn.name })
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7356:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = clone
|
|
|
|
// Shallow clone: copies every own property descriptor of obj onto a new
// object that shares obj's prototype (or a null prototype for objects
// created with Object.create(null)). Primitives and null pass through.
function clone (obj) {
  if (obj === null || typeof obj !== 'object') {
    return obj
  }

  const copy = obj instanceof Object
    ? Object.create(Object.getPrototypeOf(obj))
    : Object.create(null)

  for (const key of Object.getOwnPropertyNames(obj)) {
    const descriptor = Object.getOwnPropertyDescriptor(obj, key)
    Object.defineProperty(copy, key, descriptor)
  }

  return copy
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7758:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// graceful-fs (bundled): wraps Node's fs so operations that fail with
// EMFILE/ENFILE ("too many open files") are queued and retried after a
// file descriptor is closed somewhere in the process.
var fs = __nccwpck_require__(5747)
var polyfills = __nccwpck_require__(263)
var legacy = __nccwpck_require__(3086)
var clone = __nccwpck_require__(7356)

var util = __nccwpck_require__(1669)

/* istanbul ignore next - node 0.x polyfill */
var gracefulQueue
var previousSymbol

/* istanbul ignore else - node 0.x polyfill */
// Queue key: a registered Symbol when available so every loaded copy of
// graceful-fs finds the SAME queue; string keys are the 0.x fallback.
if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
  gracefulQueue = Symbol.for('graceful-fs.queue')
  // This is used in testing by future versions
  previousSymbol = Symbol.for('graceful-fs.previous')
} else {
  gracefulQueue = '___graceful-fs.queue'
  previousSymbol = '___graceful-fs.previous'
}

function noop () {}

// Expose `queue` on `context` (fs or global) via a getter-only property.
function publishQueue(context, queue) {
  Object.defineProperty(context, gracefulQueue, {
    get: function() {
      return queue
    }
  })
}

// Debug logging, enabled when NODE_DEBUG matches "gfs4".
var debug = noop
if (util.debuglog)
  debug = util.debuglog('gfs4')
else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
  debug = function() {
    var m = util.format.apply(util, arguments)
    m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
    console.error(m)
  }

// Once time initialization
if (!fs[gracefulQueue]) {
  // This queue can be shared by multiple loaded instances
  var queue = global[gracefulQueue] || []
  publishQueue(fs, queue)

  // Patch fs.close/closeSync to shared queue version, because we need
  // to retry() whenever a close happens *anywhere* in the program.
  // This is essential when multiple graceful-fs instances are
  // in play at the same time.
  fs.close = (function (fs$close) {
    function close (fd, cb) {
      return fs$close.call(fs, fd, function (err) {
        // This function uses the graceful-fs shared queue
        if (!err) {
          retry()
        }

        if (typeof cb === 'function')
          cb.apply(this, arguments)
      })
    }

    // keep a reference to the unpatched close for tests/future versions
    Object.defineProperty(close, previousSymbol, {
      value: fs$close
    })
    return close
  })(fs.close)

  fs.closeSync = (function (fs$closeSync) {
    function closeSync (fd) {
      // This function uses the graceful-fs shared queue
      fs$closeSync.apply(fs, arguments)
      retry()
    }

    Object.defineProperty(closeSync, previousSymbol, {
      value: fs$closeSync
    })
    return closeSync
  })(fs.closeSync)

  if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
    // in debug mode, assert at exit that no queued operation was stranded
    process.on('exit', function() {
      debug(fs[gracefulQueue])
      __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0)
    })
  }
}

if (!global[gracefulQueue]) {
  publishQueue(global, fs[gracefulQueue]);
}

// Export a patched CLONE of fs so the real fs module is not mutated
// (except under the TEST_GRACEFUL_FS_GLOBAL_PATCH escape hatch).
module.exports = patch(clone(fs))
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
  module.exports = patch(fs)
  fs.__patched = true;
}

// Wrap an fs-shaped object: every EMFILE/ENFILE-prone call is replaced
// by a version that enqueues itself for retry instead of failing.
function patch (fs) {
  // Everything that references the open() function needs to be in here
  polyfills(fs)
  fs.gracefulify = patch

  fs.createReadStream = createReadStream
  fs.createWriteStream = createWriteStream
  var fs$readFile = fs.readFile
  fs.readFile = readFile
  function readFile (path, options, cb) {
    if (typeof options === 'function')
      cb = options, options = null

    return go$readFile(path, options, cb)

    function go$readFile (path, options, cb) {
      return fs$readFile(path, options, function (err) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
          enqueue([go$readFile, [path, options, cb]])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
          retry()
        }
      })
    }
  }

  var fs$writeFile = fs.writeFile
  fs.writeFile = writeFile
  function writeFile (path, data, options, cb) {
    if (typeof options === 'function')
      cb = options, options = null

    return go$writeFile(path, data, options, cb)

    function go$writeFile (path, data, options, cb) {
      return fs$writeFile(path, data, options, function (err) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
          enqueue([go$writeFile, [path, data, options, cb]])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
          retry()
        }
      })
    }
  }

  var fs$appendFile = fs.appendFile
  if (fs$appendFile)
    fs.appendFile = appendFile
  function appendFile (path, data, options, cb) {
    if (typeof options === 'function')
      cb = options, options = null

    return go$appendFile(path, data, options, cb)

    function go$appendFile (path, data, options, cb) {
      return fs$appendFile(path, data, options, function (err) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
          enqueue([go$appendFile, [path, data, options, cb]])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
          retry()
        }
      })
    }
  }

  var fs$readdir = fs.readdir
  fs.readdir = readdir
  function readdir (path, options, cb) {
    var args = [path]
    if (typeof options !== 'function') {
      args.push(options)
    } else {
      cb = options
    }
    args.push(go$readdir$cb)

    return go$readdir(args)

    function go$readdir$cb (err, files) {
      // results are sorted for deterministic ordering
      if (files && files.sort)
        files.sort()

      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
        enqueue([go$readdir, [args]])

      else {
        if (typeof cb === 'function')
          cb.apply(this, arguments)
        retry()
      }
    }
  }

  function go$readdir (args) {
    return fs$readdir.apply(fs, args)
  }

  // node v0.8 has no patchable native streams; use the legacy versions
  if (process.version.substr(0, 4) === 'v0.8') {
    var legStreams = legacy(fs)
    ReadStream = legStreams.ReadStream
    WriteStream = legStreams.WriteStream
  }

  var fs$ReadStream = fs.ReadStream
  if (fs$ReadStream) {
    ReadStream.prototype = Object.create(fs$ReadStream.prototype)
    ReadStream.prototype.open = ReadStream$open
  }

  var fs$WriteStream = fs.WriteStream
  if (fs$WriteStream) {
    WriteStream.prototype = Object.create(fs$WriteStream.prototype)
    WriteStream.prototype.open = WriteStream$open
  }

  Object.defineProperty(fs, 'ReadStream', {
    get: function () {
      return ReadStream
    },
    set: function (val) {
      ReadStream = val
    },
    enumerable: true,
    configurable: true
  })
  Object.defineProperty(fs, 'WriteStream', {
    get: function () {
      return WriteStream
    },
    set: function (val) {
      WriteStream = val
    },
    enumerable: true,
    configurable: true
  })

  // legacy names
  var FileReadStream = ReadStream
  Object.defineProperty(fs, 'FileReadStream', {
    get: function () {
      return FileReadStream
    },
    set: function (val) {
      FileReadStream = val
    },
    enumerable: true,
    configurable: true
  })
  var FileWriteStream = WriteStream
  Object.defineProperty(fs, 'FileWriteStream', {
    get: function () {
      return FileWriteStream
    },
    set: function (val) {
      FileWriteStream = val
    },
    enumerable: true,
    configurable: true
  })

  function ReadStream (path, options) {
    if (this instanceof ReadStream)
      return fs$ReadStream.apply(this, arguments), this
    else
      return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
  }

  // Replacement .open that goes through the graceful open() below, so a
  // stream whose open hits EMFILE is retried instead of erroring.
  function ReadStream$open () {
    var that = this
    open(that.path, that.flags, that.mode, function (err, fd) {
      if (err) {
        if (that.autoClose)
          that.destroy()

        that.emit('error', err)
      } else {
        that.fd = fd
        that.emit('open', fd)
        that.read()
      }
    })
  }

  function WriteStream (path, options) {
    if (this instanceof WriteStream)
      return fs$WriteStream.apply(this, arguments), this
    else
      return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
  }

  function WriteStream$open () {
    var that = this
    open(that.path, that.flags, that.mode, function (err, fd) {
      if (err) {
        that.destroy()
        that.emit('error', err)
      } else {
        that.fd = fd
        that.emit('open', fd)
      }
    })
  }

  function createReadStream (path, options) {
    return new fs.ReadStream(path, options)
  }

  function createWriteStream (path, options) {
    return new fs.WriteStream(path, options)
  }

  var fs$open = fs.open
  fs.open = open
  function open (path, flags, mode, cb) {
    if (typeof mode === 'function')
      cb = mode, mode = null

    return go$open(path, flags, mode, cb)

    function go$open (path, flags, mode, cb) {
      return fs$open(path, flags, mode, function (err, fd) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
          enqueue([go$open, [path, flags, mode, cb]])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
          retry()
        }
      })
    }
  }

  return fs
}

// Append a deferred operation [fn, args] to the shared retry queue.
function enqueue (elem) {
  debug('ENQUEUE', elem[0].name, elem[1])
  fs[gracefulQueue].push(elem)
}

// Re-run the oldest queued operation; called after every close and
// after every successful patched call.
function retry () {
  var elem = fs[gracefulQueue].shift()
  if (elem) {
    debug('RETRY', elem[0].name, elem[1])
    elem[0].apply(null, elem[1])
  }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3086:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// graceful-fs legacy streams: hand-rolled ReadStream/WriteStream built
// on the raw Stream base class, used only on node v0.8 (see the
// process.version check in the main graceful-fs module).
var Stream = __nccwpck_require__(2413).Stream

module.exports = legacy

// Build the legacy stream constructors around the given (patched) fs.
function legacy (fs) {
  return {
    ReadStream: ReadStream,
    WriteStream: WriteStream
  }

  function ReadStream (path, options) {
    if (!(this instanceof ReadStream)) return new ReadStream(path, options);

    Stream.call(this);

    var self = this;

    this.path = path;
    this.fd = null;
    this.readable = true;
    this.paused = false;

    this.flags = 'r';
    this.mode = 438; /*=0666*/
    this.bufferSize = 64 * 1024;

    options = options || {};

    // Mixin options into this
    var keys = Object.keys(options);
    for (var index = 0, length = keys.length; index < length; index++) {
      var key = keys[index];
      this[key] = options[key];
    }

    if (this.encoding) this.setEncoding(this.encoding);

    // validate the optional byte-range options (start/end)
    if (this.start !== undefined) {
      if ('number' !== typeof this.start) {
        throw TypeError('start must be a Number');
      }
      if (this.end === undefined) {
        this.end = Infinity;
      } else if ('number' !== typeof this.end) {
        throw TypeError('end must be a Number');
      }

      if (this.start > this.end) {
        throw new Error('start must be <= end');
      }

      this.pos = this.start;
    }

    // if a descriptor was supplied via options, skip the open
    if (this.fd !== null) {
      process.nextTick(function() {
        self._read();
      });
      return;
    }

    fs.open(this.path, this.flags, this.mode, function (err, fd) {
      if (err) {
        self.emit('error', err);
        self.readable = false;
        return;
      }

      self.fd = fd;
      self.emit('open', fd);
      self._read();
    })
  }

  function WriteStream (path, options) {
    if (!(this instanceof WriteStream)) return new WriteStream(path, options);

    Stream.call(this);

    this.path = path;
    this.fd = null;
    this.writable = true;

    this.flags = 'w';
    this.encoding = 'binary';
    this.mode = 438; /*=0666*/
    this.bytesWritten = 0;

    options = options || {};

    // Mixin options into this
    var keys = Object.keys(options);
    for (var index = 0, length = keys.length; index < length; index++) {
      var key = keys[index];
      this[key] = options[key];
    }

    if (this.start !== undefined) {
      if ('number' !== typeof this.start) {
        throw TypeError('start must be a Number');
      }
      if (this.start < 0) {
        throw new Error('start must be >= zero');
      }

      this.pos = this.start;
    }

    this.busy = false;
    this._queue = [];

    // queue the open as the first pending operation, then flush
    if (this.fd === null) {
      this._open = fs.open;
      this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
      this.flush();
    }
  }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 263:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// graceful-fs polyfills: module-level setup. Caches process.cwd() (the
// cache is invalidated by the patched chdir below) before exporting the
// patch() function that fixes up individual fs methods.
var constants = __nccwpck_require__(7619)

var origCwd = process.cwd
var cwd = null

// allow tests to force a platform via GRACEFUL_FS_PLATFORM
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform

// memoized cwd: recomputed only after a chdir
process.cwd = function() {
  if (!cwd)
    cwd = origCwd.call(process)
  return cwd
}
try {
  process.cwd()
} catch (er) {}

var chdir = process.chdir
process.chdir = function(d) {
  cwd = null
  chdir.call(process, d)
}

module.exports = patch
|
|
|
|
function patch (fs) {
|
|
// (re-)implement some things that are known busted or missing.
|
|
|
|
// lchmod, broken prior to 0.6.2
|
|
// back-port the fix here.
|
|
if (constants.hasOwnProperty('O_SYMLINK') &&
|
|
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
|
patchLchmod(fs)
|
|
}
|
|
|
|
// lutimes implementation, or no-op
|
|
if (!fs.lutimes) {
|
|
patchLutimes(fs)
|
|
}
|
|
|
|
// https://github.com/isaacs/node-graceful-fs/issues/4
|
|
// Chown should not fail on einval or eperm if non-root.
|
|
// It should not fail on enosys ever, as this just indicates
|
|
// that a fs doesn't support the intended operation.
|
|
|
|
fs.chown = chownFix(fs.chown)
|
|
fs.fchown = chownFix(fs.fchown)
|
|
fs.lchown = chownFix(fs.lchown)
|
|
|
|
fs.chmod = chmodFix(fs.chmod)
|
|
fs.fchmod = chmodFix(fs.fchmod)
|
|
fs.lchmod = chmodFix(fs.lchmod)
|
|
|
|
fs.chownSync = chownFixSync(fs.chownSync)
|
|
fs.fchownSync = chownFixSync(fs.fchownSync)
|
|
fs.lchownSync = chownFixSync(fs.lchownSync)
|
|
|
|
fs.chmodSync = chmodFixSync(fs.chmodSync)
|
|
fs.fchmodSync = chmodFixSync(fs.fchmodSync)
|
|
fs.lchmodSync = chmodFixSync(fs.lchmodSync)
|
|
|
|
fs.stat = statFix(fs.stat)
|
|
fs.fstat = statFix(fs.fstat)
|
|
fs.lstat = statFix(fs.lstat)
|
|
|
|
fs.statSync = statFixSync(fs.statSync)
|
|
fs.fstatSync = statFixSync(fs.fstatSync)
|
|
fs.lstatSync = statFixSync(fs.lstatSync)
|
|
|
|
// if lchmod/lchown do not exist, then make them no-ops
|
|
if (!fs.lchmod) {
|
|
fs.lchmod = function (path, mode, cb) {
|
|
if (cb) process.nextTick(cb)
|
|
}
|
|
fs.lchmodSync = function () {}
|
|
}
|
|
if (!fs.lchown) {
|
|
fs.lchown = function (path, uid, gid, cb) {
|
|
if (cb) process.nextTick(cb)
|
|
}
|
|
fs.lchownSync = function () {}
|
|
}
|
|
|
|
// on Windows, A/V software can lock the directory, causing this
|
|
// to fail with an EACCES or EPERM if the directory contains newly
|
|
// created files. Try again on failure, for up to 60 seconds.
|
|
|
|
// Set the timeout this long because some Windows Anti-Virus, such as Parity
|
|
// bit9, may lock files for up to a minute, causing npm package install
|
|
// failures. Also, take care to yield the scheduler. Windows scheduling gives
|
|
// CPU to a busy looping process, which can cause the program causing the lock
|
|
// contention to be starved of CPU by node, so the contention doesn't resolve.
|
|
if (platform === "win32") {
|
|
fs.rename = (function (fs$rename) { return function (from, to, cb) {
|
|
var start = Date.now()
|
|
var backoff = 0;
|
|
fs$rename(from, to, function CB (er) {
|
|
if (er
|
|
&& (er.code === "EACCES" || er.code === "EPERM")
|
|
&& Date.now() - start < 60000) {
|
|
setTimeout(function() {
|
|
fs.stat(to, function (stater, st) {
|
|
if (stater && stater.code === "ENOENT")
|
|
fs$rename(from, to, CB);
|
|
else
|
|
cb(er)
|
|
})
|
|
}, backoff)
|
|
if (backoff < 100)
|
|
backoff += 10;
|
|
return;
|
|
}
|
|
if (cb) cb(er)
|
|
})
|
|
}})(fs.rename)
|
|
}
|
|
|
|
// if read() returns EAGAIN, then just try it again.
|
|
fs.read = (function (fs$read) {
|
|
function read (fd, buffer, offset, length, position, callback_) {
|
|
var callback
|
|
if (callback_ && typeof callback_ === 'function') {
|
|
var eagCounter = 0
|
|
callback = function (er, _, __) {
|
|
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
|
|
eagCounter ++
|
|
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
|
}
|
|
callback_.apply(this, arguments)
|
|
}
|
|
}
|
|
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
|
}
|
|
|
|
// This ensures `util.promisify` works as it does for native `fs.read`.
|
|
read.__proto__ = fs$read
|
|
return read
|
|
})(fs.read)
|
|
|
|
fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
|
|
var eagCounter = 0
|
|
while (true) {
|
|
try {
|
|
return fs$readSync.call(fs, fd, buffer, offset, length, position)
|
|
} catch (er) {
|
|
if (er.code === 'EAGAIN' && eagCounter < 10) {
|
|
eagCounter ++
|
|
continue
|
|
}
|
|
throw er
|
|
}
|
|
}
|
|
}})(fs.readSync)
|
|
|
|
function patchLchmod (fs) {
|
|
fs.lchmod = function (path, mode, callback) {
|
|
fs.open( path
|
|
, constants.O_WRONLY | constants.O_SYMLINK
|
|
, mode
|
|
, function (err, fd) {
|
|
if (err) {
|
|
if (callback) callback(err)
|
|
return
|
|
}
|
|
// prefer to return the chmod error, if one occurs,
|
|
// but still try to close, and report closing errors if they occur.
|
|
fs.fchmod(fd, mode, function (err) {
|
|
fs.close(fd, function(err2) {
|
|
if (callback) callback(err || err2)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
fs.lchmodSync = function (path, mode) {
|
|
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
|
|
|
|
// prefer to return the chmod error, if one occurs,
|
|
// but still try to close, and report closing errors if they occur.
|
|
var threw = true
|
|
var ret
|
|
try {
|
|
ret = fs.fchmodSync(fd, mode)
|
|
threw = false
|
|
} finally {
|
|
if (threw) {
|
|
try {
|
|
fs.closeSync(fd)
|
|
} catch (er) {}
|
|
} else {
|
|
fs.closeSync(fd)
|
|
}
|
|
}
|
|
return ret
|
|
}
|
|
}
|
|
|
|
function patchLutimes (fs) {
|
|
if (constants.hasOwnProperty("O_SYMLINK")) {
|
|
fs.lutimes = function (path, at, mt, cb) {
|
|
fs.open(path, constants.O_SYMLINK, function (er, fd) {
|
|
if (er) {
|
|
if (cb) cb(er)
|
|
return
|
|
}
|
|
fs.futimes(fd, at, mt, function (er) {
|
|
fs.close(fd, function (er2) {
|
|
if (cb) cb(er || er2)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
fs.lutimesSync = function (path, at, mt) {
|
|
var fd = fs.openSync(path, constants.O_SYMLINK)
|
|
var ret
|
|
var threw = true
|
|
try {
|
|
ret = fs.futimesSync(fd, at, mt)
|
|
threw = false
|
|
} finally {
|
|
if (threw) {
|
|
try {
|
|
fs.closeSync(fd)
|
|
} catch (er) {}
|
|
} else {
|
|
fs.closeSync(fd)
|
|
}
|
|
}
|
|
return ret
|
|
}
|
|
|
|
} else {
|
|
fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
|
|
fs.lutimesSync = function () {}
|
|
}
|
|
}
|
|
|
|
// Wrap an async chmod-style function so that "ignorable" permission errors
// (see chownErOk) are reported to the callback as success.
function chmodFix (orig) {
  if (!orig) return orig
  return function (target, mode, cb) {
    function onDone (er) {
      if (chownErOk(er)) er = null
      if (cb) cb.apply(this, arguments)
    }
    return orig.call(fs, target, mode, onDone)
  }
}
|
|
|
|
// Sync variant of chmodFix: ignorable permission errors are swallowed and
// the wrapper returns undefined, matching fs.chmodSync's normal result.
function chmodFixSync (orig) {
  if (!orig) return orig
  return function (target, mode) {
    var result
    try {
      result = orig.call(fs, target, mode)
    } catch (er) {
      if (chownErOk(er)) return undefined
      throw er
    }
    return result
  }
}
|
|
|
|
|
|
// Wrap an async chown-style function so that "ignorable" ownership errors
// (see chownErOk) are reported to the callback as success.
function chownFix (orig) {
  if (!orig) return orig
  return function (target, uid, gid, cb) {
    function onDone (er) {
      if (chownErOk(er)) er = null
      if (cb) cb.apply(this, arguments)
    }
    return orig.call(fs, target, uid, gid, onDone)
  }
}
|
|
|
|
// Sync variant of chownFix: ignorable ownership errors are swallowed and
// the wrapper returns undefined, matching fs.chownSync's normal result.
function chownFixSync (orig) {
  if (!orig) return orig
  return function (target, uid, gid) {
    var result
    try {
      result = orig.call(fs, target, uid, gid)
    } catch (er) {
      if (chownErOk(er)) return undefined
      throw er
    }
    return result
  }
}
|
|
|
|
// Older versions of Node erroneously returned signed 32-bit integers for
// uid + gid; wrap an async stat-style call to map negative values back into
// the unsigned range before handing the stats to the callback.
function statFix (orig) {
  if (!orig) return orig
  return function (target, options, cb) {
    if (typeof options === 'function') {
      cb = options
      options = null
    }
    var wrapped = function (er, stats) {
      if (stats) {
        if (stats.uid < 0) stats.uid += 0x100000000
        if (stats.gid < 0) stats.gid += 0x100000000
      }
      if (cb) cb.apply(this, arguments)
    }
    if (options) return orig.call(fs, target, options, wrapped)
    return orig.call(fs, target, wrapped)
  }
}
|
|
|
|
// Sync variant of statFix: normalize negative (signed) uid/gid values
// returned by old Node versions into the unsigned 32-bit range.
function statFixSync (orig) {
  if (!orig) return orig
  return function (target, options) {
    var stats = options
      ? orig.call(fs, target, options)
      : orig.call(fs, target)
    if (stats.uid < 0) stats.uid += 0x100000000
    if (stats.gid < 0) stats.gid += 0x100000000
    return stats
  }
}
|
|
|
|
// ENOSYS means that the fs doesn't support the op. Just ignore
|
|
// that, because it doesn't matter.
|
|
//
|
|
// if there's no getuid, or if getuid() is something other
|
|
// than 0, and the error is EINVAL or EPERM, then just ignore
|
|
// it.
|
|
//
|
|
// This specific case is a silent failure in cp, install, tar,
|
|
// and most other unix tools that manage permissions.
|
|
//
|
|
// When running as root, or if other types of errors are
|
|
// encountered, then it's strict.
|
|
// Decide whether a chown/chmod error can be silently ignored.
// ENOSYS always can (the fs doesn't support the op). EINVAL/EPERM can when
// we are not running as root — this mirrors the silent failure of cp,
// install, tar and most other unix tools that manage permissions. When
// running as root, or for any other error code, be strict.
function chownErOk (er) {
  if (!er) return true

  var code = er.code
  if (code === "ENOSYS") return true

  var isRoot = !!process.getuid && process.getuid() === 0
  if (!isRoot && (code === "EINVAL" || code === "EPERM")) return true

  return false
}
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6160:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
let _fs
|
|
try {
|
|
_fs = __nccwpck_require__(7758)
|
|
} catch (_) {
|
|
_fs = __nccwpck_require__(5747)
|
|
}
|
|
const universalify = __nccwpck_require__(7133)
|
|
const { stringify, stripBom } = __nccwpck_require__(5902)
|
|
|
|
// Read a file and parse it as JSON.
// options may be an encoding string or an object:
//   fs      - alternative fs implementation (default: module's _fs)
//   throws  - when false, parse errors resolve to null instead of throwing
//   reviver - forwarded to JSON.parse
async function _readFile (file, options = {}) {
  if (typeof options === 'string') {
    options = { encoding: options }
  }

  const fs = options.fs || _fs
  const shouldThrow = 'throws' in options ? options.throws : true

  const raw = await universalify.fromCallback(fs.readFile)(file, options)
  const text = stripBom(raw)

  try {
    return JSON.parse(text, options ? options.reviver : null)
  } catch (err) {
    if (!shouldThrow) return null
    // prefix the message with the file name so callers can tell files apart
    err.message = `${file}: ${err.message}`
    throw err
  }
}

const readFile = universalify.fromPromise(_readFile)
|
|
|
|
// Synchronous JSON file read; see _readFile for the options contract.
function readFileSync (file, options = {}) {
  if (typeof options === 'string') {
    options = { encoding: options }
  }

  const fs = options.fs || _fs
  const shouldThrow = 'throws' in options ? options.throws : true

  try {
    const content = stripBom(fs.readFileSync(file, options))
    return JSON.parse(content, options.reviver)
  } catch (err) {
    if (!shouldThrow) return null
    // prefix the message with the file name so callers can tell files apart
    err.message = `${file}: ${err.message}`
    throw err
  }
}
|
|
|
|
// Serialize obj (via stringify, honoring EOL/finalEOL/replacer/spaces in
// options) and write it to file using options.fs or the module default.
async function _writeFile (file, obj, options = {}) {
  const fs = options.fs || _fs
  await universalify.fromCallback(fs.writeFile)(file, stringify(obj, options), options)
}

const writeFile = universalify.fromPromise(_writeFile)
|
|
|
|
// Synchronous counterpart of _writeFile.
function writeFileSync (file, obj, options = {}) {
  const fs = options.fs || _fs
  // not sure if fs.writeFileSync returns anything, but just in case
  return fs.writeFileSync(file, stringify(obj, options), options)
}
|
|
|
|
const jsonfile = {
|
|
readFile,
|
|
readFileSync,
|
|
writeFile,
|
|
writeFileSync
|
|
}
|
|
|
|
module.exports = jsonfile
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7133:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
exports.fromCallback = function (fn) {
|
|
return Object.defineProperty(function (...args) {
|
|
if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
|
|
else {
|
|
return new Promise((resolve, reject) => {
|
|
fn.call(
|
|
this,
|
|
...args,
|
|
(err, res) => (err != null) ? reject(err) : resolve(res)
|
|
)
|
|
})
|
|
}
|
|
}, 'name', { value: fn.name })
|
|
}
|
|
|
|
exports.fromPromise = function (fn) {
|
|
return Object.defineProperty(function (...args) {
|
|
const cb = args[args.length - 1]
|
|
if (typeof cb !== 'function') return fn.apply(this, args)
|
|
else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)
|
|
}, 'name', { value: fn.name })
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5902:
|
|
/***/ ((module) => {
|
|
|
|
// JSON.stringify wrapper: applies replacer/spaces, normalizes newlines to
// the requested EOL, and appends a trailing EOL unless finalEOL is false.
function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) {
  const body = JSON.stringify(obj, replacer, spaces).replace(/\n/g, EOL)
  return finalEOL ? body + EOL : body
}
|
|
|
|
// Remove a leading UTF-8 BOM. Buffers are decoded first because JSON.parse
// would convert them to a utf8 string anyway when no encoding was specified.
function stripBom (content) {
  const text = Buffer.isBuffer(content) ? content.toString('utf8') : content
  return text.replace(/^\uFEFF/, '')
}
|
|
|
|
module.exports = { stringify, stripBom }
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4294:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
module.exports = __nccwpck_require__(4219);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4219:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
var net = __nccwpck_require__(1631);
|
|
var tls = __nccwpck_require__(4016);
|
|
var http = __nccwpck_require__(8605);
|
|
var https = __nccwpck_require__(7211);
|
|
var events = __nccwpck_require__(8614);
|
|
var assert = __nccwpck_require__(2357);
|
|
var util = __nccwpck_require__(1669);
|
|
|
|
|
|
exports.httpOverHttp = httpOverHttp;
|
|
exports.httpsOverHttp = httpsOverHttp;
|
|
exports.httpOverHttps = httpOverHttps;
|
|
exports.httpsOverHttps = httpsOverHttps;
|
|
|
|
|
|
// Agent for plain HTTP requests tunneled through an HTTP proxy.
function httpOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  return tunnelAgent;
}
|
|
|
|
// Agent for HTTPS requests tunneled through an HTTP proxy
// (CONNECT handshake followed by TLS over the raw socket).
function httpsOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
|
|
|
|
// Agent for plain HTTP requests tunneled through an HTTPS proxy.
function httpOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  return tunnelAgent;
}
|
|
|
|
// Agent for HTTPS requests tunneled through an HTTPS proxy.
function httpsOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
|
|
|
|
|
|
/**
 * Agent that tunnels requests through a proxy using HTTP CONNECT.
 * options.proxy holds the proxy connection settings; options.maxSockets
 * limits concurrent tunnels — excess requests are queued in self.requests
 * until a socket frees up.
 */
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = [];  // queued requests waiting for a free socket
  self.sockets = [];   // live sockets plus in-flight placeholders

  // When a request releases its socket, hand it to a queued request for the
  // same origin if one exists; otherwise destroy it — a CONNECT tunnel is
  // bound to a single host:port and cannot be reused for another origin.
  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
/**
 * http.Agent entry point: either queue the request (pool is full) or open
 * a new tunnel socket and attach the request to it.
 */
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    // Request finished with the socket: let the agent reuse or destroy it.
    function onFree() {
      self.emit('free', socket, options);
    }

    // Socket closed or detached from the agent: drop it from the pool and
    // remove all listeners so the socket can be garbage collected.
    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
|
|
|
/**
 * Open a tunnel socket: issue a CONNECT request to the proxy and, on a 200
 * response, hand the raw socket to cb. On failure, emit 'error' on the
 * originating request and release the pool slot.
 *
 * Fix: use Buffer.from(...) for the Proxy-Authorization header instead of
 * the deprecated `new Buffer(string)` constructor (Node DEP0005).
 */
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  // Reserve a pool slot immediately so maxSockets accounting stays correct
  // while the CONNECT handshake is still in flight.
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    // A CONNECT response must not carry a body; data here means the proxy
    // is misbehaving, so drop the connection.
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the reserved placeholder for the real socket.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
|
|
|
// Drop a socket (or pending placeholder) from the pool; if requests are
// queued, immediately open a replacement socket for the oldest one.
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index === -1) {
    return;
  }
  this.sockets.splice(index, 1);

  var queued = this.requests.shift();
  if (!queued) {
    return;
  }
  // A freed slot plus a waiting request: create a fresh socket to take
  // over in the pool for the one that closed.
  this.createSocket(queued, function(newSocket) {
    queued.request.onSocket(newSocket);
  });
};
|
|
|
|
// createSocket variant used by the https-over-* agents: once the CONNECT
// tunnel is up, wrap the raw socket in TLS. The SNI servername comes from
// the request's Host header (with any :port suffix stripped), falling back
// to options.host.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // replace the plain socket with the TLS socket in the pool
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
|
|
|
|
|
// Normalize (host, port, localAddress) arguments: modern callers (v0.11+)
// already pass an options object as `host`; legacy callers pass a string.
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later
  }
  return { // since v0.10
    host: host,
    port: port,
    localAddress: localAddress
  };
}
|
|
|
|
/**
 * Shallow-merge every defined property of the override objects into target,
 * left to right (later objects win); `undefined` values are skipped so they
 * cannot erase earlier settings. Returns the (mutated) target.
 *
 * Fix: the original guard `typeof overrides === 'object'` also admits null
 * (typeof null === 'object'), and Object.keys(null) throws a TypeError.
 * Null/non-object overrides are now skipped.
 */
function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    if (overrides !== null && typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}
|
|
|
|
|
|
// Debug logging: enabled when NODE_DEBUG contains the word "tunnel",
// otherwise a no-op so the hot path pays nothing.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Prefix output with "TUNNEL:" — merged into a format string when the
    // first argument is one, prepended as its own argument otherwise.
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2707:
|
|
/***/ ((module) => {
|
|
|
|
/**
|
|
* Convert array of 16 byte values to UUID string format of the form:
|
|
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
|
*/
|
|
// Lookup table: byte value -> two-character lowercase hex string.
var byteToHex = [];
for (var i = 0; i < 256; ++i) {
  byteToHex[i] = (i + 0x100).toString(16).substr(1);
}

/**
 * Convert 16 bytes starting at buf[offset] (default 0) into the canonical
 * UUID string form XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX.
 */
function bytesToUuid(buf, offset) {
  var pos = offset || 0;
  var parts = [];
  for (var b = 0; b < 16; ++b) {
    parts.push(byteToHex[buf[pos + b]]);
    // Dashes go after the 4th, 6th, 8th and 10th byte.
    if (b === 3 || b === 5 || b === 7 || b === 9) {
      parts.push('-');
    }
  }
  // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
  return parts.join('');
}
|
|
|
|
module.exports = bytesToUuid;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5859:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// Unique ID creation requires a high quality random # generator. In node.js
|
|
// this is pretty straight-forward - we use the crypto API.
|
|
|
|
var crypto = __nccwpck_require__(6417);
|
|
|
|
// Return 16 cryptographically strong random bytes for UUID generation.
module.exports = function nodeRNG() {
  return crypto.randomBytes(16);
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 824:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var rng = __nccwpck_require__(5859);
|
|
var bytesToUuid = __nccwpck_require__(2707);
|
|
|
|
/**
 * Generate an RFC 4122 version-4 UUID.
 *   options.random - 16 pre-generated random bytes (takes precedence)
 *   options.rng    - alternative random-byte generator (default: rng)
 *   buf / offset   - when buf is given, the 16 bytes are written into it at
 *                    offset and buf is returned instead of a string.
 * Legacy: a string first argument selects array ('binary') or string output.
 */
function v4(options, buf, offset) {
  var start = (buf && offset) || 0;

  if (typeof options === 'string') {
    buf = options === 'binary' ? new Array(16) : null;
    options = null;
  }
  options = options || {};

  var rnds = options.random || (options.rng || rng)();

  // Per RFC 4122 section 4.4: set the version (4) and variant (10x) bits.
  rnds[6] = (rnds[6] & 0x0f) | 0x40;
  rnds[8] = (rnds[8] & 0x3f) | 0x80;

  // Copy bytes to buffer, if provided
  if (buf) {
    for (var idx = 0; idx < 16; ++idx) {
      buf[start + idx] = rnds[idx];
    }
    return buf;
  }
  return bytesToUuid(rnds);
}
|
|
|
|
module.exports = v4;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 399:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// --- Interop helpers emitted by the TypeScript compiler (not hand-written) ---
// __createBinding: re-export property k of module m onto o (as a live getter
// on engines with Object.create, a plain copy otherwise).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attach a CommonJS module as the `default` export of an
// ES namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulate `import * as ns from '...'` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter: desugared async/await runtime — drives a generator, resolving
// each yielded value through the Promise machinery.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const core = __importStar(__nccwpck_require__(2186));
|
|
const tc = __importStar(__nccwpck_require__(7784));
|
|
const exec = __importStar(__nccwpck_require__(1514));
|
|
const path = __importStar(__nccwpck_require__(5622));
|
|
const fs = __importStar(__nccwpck_require__(5747));
|
|
const fse = __importStar(__nccwpck_require__(5630));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
// Directory name (under <sdk>/cmdline-tools/) this action installs into.
const CMDLINE_TOOLS_VERSION = '3.0';
// Build id of the commandline-tools package downloaded from Google.
const COMMANDLINE_TOOLS_VERSION = '6858069';
const COMMANDLINE_TOOLS_WIN_URL = `https://dl.google.com/android/repository/commandlinetools-win-${COMMANDLINE_TOOLS_VERSION}_latest.zip`;
const COMMANDLINE_TOOLS_MAC_URL = `https://dl.google.com/android/repository/commandlinetools-mac-${COMMANDLINE_TOOLS_VERSION}_latest.zip`;
const COMMANDLINE_TOOLS_LIN_URL = `https://dl.google.com/android/repository/commandlinetools-linux-${COMMANDLINE_TOOLS_VERSION}_latest.zip`;
// Fallback SDK location when ANDROID_SDK_ROOT is not set: ~/.android/sdk
const HOME = os.homedir();
const ANDROID_HOME_DIR = path.join(HOME, '.android');
const ANDROID_HOME_SDK_DIR = path.join(ANDROID_HOME_DIR, 'sdk');
// Mutable on purpose: run() rewrites it on Windows 2016 when the path
// contains spaces.
let ANDROID_SDK_ROOT = process.env['ANDROID_SDK_ROOT'] || ANDROID_HOME_SDK_DIR;
|
|
// Absolute path of the sdkmanager executable for a given cmdline-tools
// version directory under ANDROID_SDK_ROOT.
function getSdkManagerPath(cmdToolsVersion) {
    const parts = ['cmdline-tools', cmdToolsVersion, 'bin', 'sdkmanager'];
    return path.join(ANDROID_SDK_ROOT, ...parts);
}
|
|
/**
 * Locate an already-installed sdkmanager under ANDROID_SDK_ROOT.
 * Preference order: the pinned CMDLINE_TOOLS_VERSION, then 'latest', then
 * the highest other installed version. Returns { isFound, isCorrectVersion,
 * exePath } with exePath === '' when nothing usable exists.
 *
 * Fix: the fallback sort was a plain string sort, which orders versions
 * lexicographically ('2.0' before '10.0'); it now compares numerically when
 * both entries parse as numbers, keeping the old string order otherwise.
 */
function findPreinstalledSdkManager() {
    const result = { isFound: false, isCorrectVersion: false, exePath: '' };
    // First try to find the version defined in CMDLINE_TOOLS_VERSION
    result.exePath = getSdkManagerPath(CMDLINE_TOOLS_VERSION);
    result.isFound = fs.existsSync(result.exePath);
    if (result.isFound) {
        result.isCorrectVersion = true;
        return result;
    }
    // cmdline-tools could have a 'latest' version, but if it was installed 2 years ago
    // it may not be 'latest' as of today
    result.exePath = getSdkManagerPath('latest');
    result.isFound = fs.existsSync(result.exePath);
    if (result.isFound) {
        return result;
    }
    result.exePath = '';
    // Find whatever version is available in ANDROID_SDK_ROOT
    const cmdlineToolsDir = path.join(ANDROID_SDK_ROOT, 'cmdline-tools');
    const foundVersions = fs.existsSync(cmdlineToolsDir)
        ? fs.readdirSync(cmdlineToolsDir)
        : [];
    const foundVersionsFiltered = foundVersions.filter(obj => '.' !== obj && '..' !== obj);
    // Sort descending so the newest version is tried first; numeric compare
    // when possible so '10.0' correctly beats '2.0'.
    const foundVersionsSorted = foundVersionsFiltered.sort((a, b) => {
        const numA = Number.parseFloat(a);
        const numB = Number.parseFloat(b);
        if (!Number.isNaN(numA) && !Number.isNaN(numB) && numA !== numB) {
            return numB - numA;
        }
        return a > b ? -1 : 1;
    });
    for (const version of foundVersionsSorted) {
        result.exePath = getSdkManagerPath(version);
        result.isFound = fs.existsSync(result.exePath);
        if (result.isFound) {
            return result;
        }
    }
    result.exePath = '';
    return result;
}
|
|
// Run sdkmanager with a single argument, feeding a stream of "y" answers on
// stdin so interactive license prompts are auto-accepted. Returns a promise
// that resolves when the process exits (native async/await is the exact
// runtime equivalent of the tsc __awaiter/generator form used elsewhere).
async function callSdkManager(sdkManager, arg) {
    const acceptBuffer = Buffer.from(Array(10).fill('y').join('\n'), 'utf8');
    await exec.exec(sdkManager, [arg], {
        input: acceptBuffer
    });
}
|
|
/**
 * Ensure a usable sdkmanager exists under ANDROID_SDK_ROOT, downloading and
 * unpacking Google's commandline-tools package when none is preinstalled.
 * Resolves with the absolute path of the sdkmanager executable, or '' on an
 * unsupported platform.
 */
function installSdkManager() {
    return __awaiter(this, void 0, void 0, function* () {
        fs.mkdirSync(ANDROID_SDK_ROOT, { recursive: true });
        // touch $ANDROID_SDK_ROOT/repositories.cfg
        fs.closeSync(fs.openSync(path.join(ANDROID_SDK_ROOT, 'repositories.cfg'), 'w'));
        const sdkManager = findPreinstalledSdkManager();
        if (!sdkManager.isFound) {
            // Nothing preinstalled: pick the download URL for this platform.
            let cmdlineToolsURL;
            if (process.platform === 'linux') {
                cmdlineToolsURL = COMMANDLINE_TOOLS_LIN_URL;
            }
            else if (process.platform === 'darwin') {
                cmdlineToolsURL = COMMANDLINE_TOOLS_MAC_URL;
            }
            else if (process.platform === 'win32') {
                cmdlineToolsURL = COMMANDLINE_TOOLS_WIN_URL;
            }
            else {
                core.error(`Unsupported platform: ${process.platform}`);
                return '';
            }
            const cmdlineToolsZip = yield tc.downloadTool(cmdlineToolsURL);
            const cmdlineToolsExtractedLocation = yield tc.extractZip(cmdlineToolsZip);
            // Move cmdline-tools to where it would be if it was installed through sdkmanager
            // Will allow calling sdkmanager without --sdk_root='..' argument
            const desiredLocation = path.join(ANDROID_SDK_ROOT, 'cmdline-tools', CMDLINE_TOOLS_VERSION);
            // Create parent directory
            fs.mkdirSync(path.dirname(desiredLocation), { recursive: true });
            // Make sure we don't have leftover target directory (happens sometimes...)
            if (fs.existsSync(desiredLocation))
                fse.removeSync(desiredLocation);
            // @TODO: use io.mv instead of fs-extra.moveSync once following issue is resolved:
            // https://github.com/actions/toolkit/issues/706
            fse.moveSync(path.join(cmdlineToolsExtractedLocation, 'cmdline-tools'), desiredLocation);
            fse.removeSync(cmdlineToolsExtractedLocation);
            sdkManager.exePath = getSdkManagerPath(CMDLINE_TOOLS_VERSION);
            sdkManager.isCorrectVersion = true;
        }
        if (!sdkManager.isCorrectVersion) {
            // Some other version exists: use it once to install the pinned one.
            yield callSdkManager(sdkManager.exePath, `cmdline-tools;${CMDLINE_TOOLS_VERSION}`);
            sdkManager.exePath = getSdkManagerPath(CMDLINE_TOOLS_VERSION);
        }
        return sdkManager.exePath;
    });
}
|
|
/**
 * Action entry point: install sdkmanager, accept licenses, install the base
 * 'tools' and 'platform-tools' packages, then export ANDROID_HOME /
 * ANDROID_SDK_ROOT and PATH entries for subsequent workflow steps.
 */
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        if ('win16' === process.env['ImageOS']) {
            if (-1 !== ANDROID_SDK_ROOT.indexOf(' ')) {
                // On Windows2016, Android SDK is installed to Program Files,
                // and it doesn't really work..
                // C:\windows\system32\cmd.exe /D /S /C ""C:\Program Files (x86)\Android\android-sdk\cmdline-tools\3.0\bin\sdkmanager.bat" --licenses"
                // Error: Could not find or load main class Files
                const newSDKLocation = ANDROID_SDK_ROOT.replace(/\s/gi, '-');
                core.debug(`moving ${ANDROID_SDK_ROOT} to ${newSDKLocation}`);
                fs.mkdirSync(path.dirname(newSDKLocation), { recursive: true });
                // intentionally using fs.renameSync,
                // because it doesn't move across drives
                fs.renameSync(ANDROID_SDK_ROOT, newSDKLocation);
                ANDROID_SDK_ROOT = newSDKLocation;
            }
        }
        const sdkManager = yield installSdkManager();
        core.debug(`sdkmanager installed to: ${sdkManager}`);
        // Accept all SDK licenses, then install the base packages.
        yield callSdkManager(sdkManager, '--licenses');
        yield callSdkManager(sdkManager, 'tools');
        yield callSdkManager(sdkManager, 'platform-tools');
        core.setOutput('ANDROID_COMMANDLINE_TOOLS_VERSION', COMMANDLINE_TOOLS_VERSION);
        // Expose the SDK location under both the legacy and current names.
        core.exportVariable('ANDROID_HOME', ANDROID_SDK_ROOT);
        core.exportVariable('ANDROID_SDK_ROOT', ANDROID_SDK_ROOT);
        core.addPath(path.dirname(sdkManager));
        core.addPath(path.join(ANDROID_SDK_ROOT, 'platform-tools'));
        core.debug('add matchers');
        // eslint-disable-next-line no-console
        console.log(`##[add-matcher]${path.join(__dirname, '..', 'matchers.json')}`);
    });
}
|
|
// Surface any error escaping run() as an action failure instead of leaving
// a floating promise whose unhandled rejection is hard to diagnose in CI
// logs (and does not reliably mark the step as failed).
run().catch(err => {
    core.setFailed(err instanceof Error ? err.message : String(err));
});
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2357:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("assert");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3129:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("child_process");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7619:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("constants");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6417:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("crypto");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8614:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("events");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5747:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("fs");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8605:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("http");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7211:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("https");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1631:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("net");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2087:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("os");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5622:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("path");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2413:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("stream");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4016:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("tls");;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1669:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("util");;
|
|
|
|
/***/ })
|
|
|
|
/******/ });
|
|
/************************************************************************/
|
|
/******/ // The module cache
|
|
/******/ var __webpack_module_cache__ = {};
|
|
/******/
|
|
/******/ // The require function
|
|
/******/ function __nccwpck_require__(moduleId) {
|
|
/******/ // Check if module is in cache
|
|
/******/ if(__webpack_module_cache__[moduleId]) {
|
|
/******/ return __webpack_module_cache__[moduleId].exports;
|
|
/******/ }
|
|
/******/ // Create a new module (and put it into the cache)
|
|
/******/ var module = __webpack_module_cache__[moduleId] = {
|
|
/******/ // no module.id needed
|
|
/******/ // no module.loaded needed
|
|
/******/ exports: {}
|
|
/******/ };
|
|
/******/
|
|
/******/ // Execute the module function
|
|
/******/ var threw = true;
|
|
/******/ try {
|
|
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
|
|
/******/ threw = false;
|
|
/******/ } finally {
|
|
/******/ if(threw) delete __webpack_module_cache__[moduleId];
|
|
/******/ }
|
|
/******/
|
|
/******/ // Return the exports of the module
|
|
/******/ return module.exports;
|
|
/******/ }
|
|
/******/
|
|
/************************************************************************/
|
|
/******/ /* webpack/runtime/compat */
|
|
/******/
|
|
/******/ __nccwpck_require__.ab = __dirname + "/";/************************************************************************/
|
|
/******/ // module exports must be returned from runtime so entry inlining is disabled
|
|
/******/ // startup
|
|
/******/ // Load entry module and return exports
|
|
/******/ return __nccwpck_require__(399);
|
|
/******/ })()
|
|
; |