// Bundled JavaScript from the android-actions/setup-android GitHub Action
// (mirror of https://github.com/android-actions/setup-android.git, synced 2024-11-23 18:12:03 +00:00)
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__nccwpck_require__(2037));
const utils_1 = __nccwpck_require__(5278);
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
*/
function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
constructor(command, properties, message) {
if (!command) {
command = 'missing.command';
}
this.command = command;
this.properties = properties;
this.message = message;
}
toString() {
let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' ';
let first = true;
for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key];
if (val) {
if (first) {
first = false;
}
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
}
}
}
}
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
return cmdStr;
}
}
function escapeData(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
}
function escapeProperty(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A')
.replace(/:/g, '%3A')
.replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map

/***/ }),

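// Illustrative note (not part of the bundled source): given the command format
// documented above, a hypothetical call such as
//   issueCommand('warning', { file: 'app.js', line: 10 }, 'Something looks off')
// writes a single workflow-command line to stdout of the form
//   ::warning file=app.js,line=10::Something looks off
// with '%', '\r' and '\n' percent-encoded by escapeData/escapeProperty, and ':'
// and ',' additionally encoded inside property values.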
/***/ 2186:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
|
const command_1 = __nccwpck_require__(7351);
|
|
const file_command_1 = __nccwpck_require__(717);
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
const oidc_utils_1 = __nccwpck_require__(8041);
|
|
/**
|
|
* The code to exit an action
|
|
*/
|
|
var ExitCode;
|
|
(function (ExitCode) {
|
|
/**
|
|
* A code indicating that the action was successful
|
|
*/
|
|
ExitCode[ExitCode["Success"] = 0] = "Success";
|
|
/**
|
|
* A code indicating that the action was a failure
|
|
*/
|
|
ExitCode[ExitCode["Failure"] = 1] = "Failure";
|
|
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
//-----------------------------------------------------------------------
|
|
// Variables
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets env variable for this action and future actions in the job
|
|
* @param name the name of the variable to set
|
|
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function exportVariable(name, val) {
|
|
const convertedVal = utils_1.toCommandValue(val);
|
|
process.env[name] = convertedVal;
|
|
const filePath = process.env['GITHUB_ENV'] || '';
|
|
if (filePath) {
|
|
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
|
|
}
|
|
command_1.issueCommand('set-env', { name }, convertedVal);
|
|
}
|
|
exports.exportVariable = exportVariable;
|
|
/**
|
|
* Registers a secret which will get masked from logs
|
|
* @param secret value of the secret
|
|
*/
|
|
function setSecret(secret) {
|
|
command_1.issueCommand('add-mask', {}, secret);
|
|
}
|
|
exports.setSecret = setSecret;
|
|
/**
|
|
* Prepends inputPath to the PATH (for this action and future actions)
|
|
* @param inputPath
|
|
*/
|
|
function addPath(inputPath) {
|
|
const filePath = process.env['GITHUB_PATH'] || '';
|
|
if (filePath) {
|
|
file_command_1.issueFileCommand('PATH', inputPath);
|
|
}
|
|
else {
|
|
command_1.issueCommand('add-path', {}, inputPath);
|
|
}
|
|
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
|
}
|
|
exports.addPath = addPath;
|
|
/**
|
|
* Gets the value of an input.
|
|
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
|
|
* Returns an empty string if the value is not defined.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string
|
|
*/
|
|
function getInput(name, options) {
|
|
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
|
|
if (options && options.required && !val) {
|
|
throw new Error(`Input required and not supplied: ${name}`);
|
|
}
|
|
if (options && options.trimWhitespace === false) {
|
|
return val;
|
|
}
|
|
return val.trim();
|
|
}
|
|
exports.getInput = getInput;
|
|
/**
|
|
* Gets the values of an multiline input. Each value is also trimmed.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string[]
|
|
*
|
|
*/
|
|
function getMultilineInput(name, options) {
|
|
const inputs = getInput(name, options)
|
|
.split('\n')
|
|
.filter(x => x !== '');
|
|
if (options && options.trimWhitespace === false) {
|
|
return inputs;
|
|
}
|
|
return inputs.map(input => input.trim());
|
|
}
|
|
exports.getMultilineInput = getMultilineInput;
|
|
/**
|
|
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
|
|
* Support boolean input list: `true | True | TRUE | false | False | FALSE` .
|
|
* The return value is also in boolean type.
|
|
* ref: https://yaml.org/spec/1.2/spec.html#id2804923
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns boolean
|
|
*/
|
|
function getBooleanInput(name, options) {
|
|
const trueValue = ['true', 'True', 'TRUE'];
|
|
const falseValue = ['false', 'False', 'FALSE'];
|
|
const val = getInput(name, options);
|
|
if (trueValue.includes(val))
|
|
return true;
|
|
if (falseValue.includes(val))
|
|
return false;
|
|
throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
|
|
`Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
|
|
}
|
|
exports.getBooleanInput = getBooleanInput;
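// Illustrative note (not part of the bundled source): getInput reads
// process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`], so a hypothetical
// input named "sdk version" is looked up as INPUT_SDK_VERSION and trimmed unless
// options.trimWhitespace === false. getBooleanInput then accepts only
// true/True/TRUE/false/False/FALSE and throws a TypeError for anything else.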
|
|
/**
|
|
* Sets the value of an output.
|
|
*
|
|
* @param name name of the output to set
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function setOutput(name, value) {
|
|
const filePath = process.env['GITHUB_OUTPUT'] || '';
|
|
if (filePath) {
|
|
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
|
|
}
|
|
process.stdout.write(os.EOL);
|
|
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
|
|
}
|
|
exports.setOutput = setOutput;
|
|
/**
|
|
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
|
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
|
*
|
|
*/
|
|
function setCommandEcho(enabled) {
|
|
command_1.issue('echo', enabled ? 'on' : 'off');
|
|
}
|
|
exports.setCommandEcho = setCommandEcho;
|
|
//-----------------------------------------------------------------------
|
|
// Results
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets the action status to failed.
|
|
* When the action exits it will be with an exit code of 1
|
|
* @param message add error issue message
|
|
*/
|
|
function setFailed(message) {
|
|
process.exitCode = ExitCode.Failure;
|
|
error(message);
|
|
}
|
|
exports.setFailed = setFailed;
|
|
//-----------------------------------------------------------------------
|
|
// Logging Commands
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Gets whether Actions Step Debug is on or not
|
|
*/
|
|
function isDebug() {
|
|
return process.env['RUNNER_DEBUG'] === '1';
|
|
}
|
|
exports.isDebug = isDebug;
|
|
/**
|
|
* Writes debug message to user log
|
|
* @param message debug message
|
|
*/
|
|
function debug(message) {
|
|
command_1.issueCommand('debug', {}, message);
|
|
}
|
|
exports.debug = debug;
|
|
/**
|
|
* Adds an error issue
|
|
* @param message error issue message. Errors will be converted to string via toString()
|
|
* @param properties optional properties to add to the annotation.
|
|
*/
|
|
function error(message, properties = {}) {
|
|
command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.error = error;
|
|
/**
|
|
* Adds a warning issue
|
|
* @param message warning issue message. Errors will be converted to string via toString()
|
|
* @param properties optional properties to add to the annotation.
|
|
*/
|
|
function warning(message, properties = {}) {
|
|
command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.warning = warning;
|
|
/**
|
|
* Adds a notice issue
|
|
* @param message notice issue message. Errors will be converted to string via toString()
|
|
* @param properties optional properties to add to the annotation.
|
|
*/
|
|
function notice(message, properties = {}) {
|
|
command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.notice = notice;
|
|
/**
|
|
* Writes info to log with console.log.
|
|
* @param message info message
|
|
*/
|
|
function info(message) {
|
|
process.stdout.write(message + os.EOL);
|
|
}
|
|
exports.info = info;
|
|
/**
|
|
* Begin an output group.
|
|
*
|
|
* Output until the next `groupEnd` will be foldable in this group
|
|
*
|
|
* @param name The name of the output group
|
|
*/
|
|
function startGroup(name) {
|
|
command_1.issue('group', name);
|
|
}
|
|
exports.startGroup = startGroup;
|
|
/**
|
|
* End an output group.
|
|
*/
|
|
function endGroup() {
|
|
command_1.issue('endgroup');
|
|
}
|
|
exports.endGroup = endGroup;
|
|
/**
|
|
* Wrap an asynchronous function call in a group.
|
|
*
|
|
* Returns the same type as the function itself.
|
|
*
|
|
* @param name The name of the group
|
|
* @param fn The function to wrap in the group
|
|
*/
|
|
function group(name, fn) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
startGroup(name);
|
|
let result;
|
|
try {
|
|
result = yield fn();
|
|
}
|
|
finally {
|
|
endGroup();
|
|
}
|
|
return result;
|
|
});
|
|
}
|
|
exports.group = group;
|
|
//-----------------------------------------------------------------------
|
|
// Wrapper action state
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
|
*
|
|
* @param name name of the state to store
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function saveState(name, value) {
|
|
const filePath = process.env['GITHUB_STATE'] || '';
|
|
if (filePath) {
|
|
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
|
|
}
|
|
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
|
|
}
|
|
exports.saveState = saveState;
|
|
/**
|
|
* Gets the value of an state set by this action's main execution.
|
|
*
|
|
* @param name name of the state to get
|
|
* @returns string
|
|
*/
|
|
function getState(name) {
|
|
return process.env[`STATE_${name}`] || '';
|
|
}
|
|
exports.getState = getState;
|
|
function getIDToken(aud) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return yield oidc_utils_1.OidcClient.getIDToken(aud);
|
|
});
|
|
}
|
|
exports.getIDToken = getIDToken;
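// Illustrative note (not part of the bundled source): getIDToken delegates to
// OidcClient.getIDToken (module 8041 below), which reads
// ACTIONS_ID_TOKEN_REQUEST_URL / ACTIONS_ID_TOKEN_REQUEST_TOKEN, optionally appends
// &audience=<encoded audience> to the request URL, masks the returned token via
// setSecret, and resolves with the token string, e.g. (hypothetical audience)
//   const jwt = await core.getIDToken('my-audience');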
|
|
/**
|
|
* Summary exports
|
|
*/
|
|
var summary_1 = __nccwpck_require__(1327);
|
|
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
|
|
/**
|
|
* @deprecated use core.summary
|
|
*/
|
|
var summary_2 = __nccwpck_require__(1327);
|
|
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
|
|
/**
|
|
* Path exports
|
|
*/
|
|
var path_utils_1 = __nccwpck_require__(2981);
|
|
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
|
|
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
|
|
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
|
|
//# sourceMappingURL=core.js.map
|
|
|
|
/***/ }),
/***/ 717:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(8974);
const utils_1 = __nccwpck_require__(5278);
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
encoding: 'utf8'
});
}
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map

/***/ }),

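// Illustrative note (not part of the bundled source): for a hypothetical call
//   prepareKeyValueMessage('my-output', { ok: true })
// the string appended to the file named by $GITHUB_OUTPUT would look like
//   my-output<<ghadelimiter_<random-uuid>
//   {"ok":true}
//   ghadelimiter_<random-uuid>
// mirroring the heredoc-style syntax the runner parses for file commands.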
/***/ 8041:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186);
class OidcClient {
static createHttpClient(allowRetry = true, maxRetry = 10) {
const requestOptions = {
allowRetries: allowRetry,
maxRetries: maxRetry
};
return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
}
static getRequestToken() {
const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
if (!token) {
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
}
return token;
}
static getIDTokenUrl() {
const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
if (!runtimeUrl) {
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
}
return runtimeUrl;
}
static getCall(id_token_url) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const httpclient = OidcClient.createHttpClient();
const res = yield httpclient
.getJson(id_token_url)
.catch(error => {
throw new Error(`Failed to get ID Token. \n
Error Code : ${error.statusCode}\n
Error Message: ${error.result.message}`);
});
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
if (!id_token) {
throw new Error('Response json body do not have ID Token field');
}
return id_token;
});
}
static getIDToken(audience) {
return __awaiter(this, void 0, void 0, function* () {
try {
// New ID Token is requested from action service
let id_token_url = OidcClient.getIDTokenUrl();
if (audience) {
const encodedAudience = encodeURIComponent(audience);
id_token_url = `${id_token_url}&audience=${encodedAudience}`;
}
core_1.debug(`ID token url is ${id_token_url}`);
const id_token = yield OidcClient.getCall(id_token_url);
core_1.setSecret(id_token);
return id_token;
}
catch (error) {
throw new Error(`Error message: ${error.message}`);
}
});
}
}
exports.OidcClient = OidcClient;
//# sourceMappingURL=oidc-utils.js.map

/***/ }),

/***/ 2981:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(1017));
/**
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
*
* @param pth. Path to transform.
* @return string Posix path.
*/
function toPosixPath(pth) {
return pth.replace(/[\\]/g, '/');
}
exports.toPosixPath = toPosixPath;
/**
* toWin32Path converts the given path to the win32 form. On Linux, / will be
* replaced with \\.
*
* @param pth. Path to transform.
* @return string Win32 path.
*/
function toWin32Path(pth) {
return pth.replace(/[/]/g, '\\');
}
exports.toWin32Path = toWin32Path;
/**
* toPlatformPath converts the given path to a platform-specific path. It does
* this by replacing instances of / and \ with the platform-specific path
* separator.
*
* @param pth The path to platformize.
* @return string The platform-specific path.
*/
function toPlatformPath(pth) {
return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
//# sourceMappingURL=path-utils.js.map

/***/ }),

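// Illustrative note (not part of the bundled source), using hypothetical paths:
//   toPosixPath('C:\\tools\\android')  -> 'C:/tools/android'
//   toWin32Path('sdk/cmdline-tools')   -> 'sdk\\cmdline-tools'
//   toPlatformPath('sdk/build-tools')  -> uses path.sep, so 'sdk\\build-tools' on
//   Windows and 'sdk/build-tools' elsewhere.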
/***/ 1327:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
|
|
const os_1 = __nccwpck_require__(2037);
|
|
const fs_1 = __nccwpck_require__(7147);
|
|
const { access, appendFile, writeFile } = fs_1.promises;
|
|
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
|
|
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
|
|
class Summary {
|
|
constructor() {
|
|
this._buffer = '';
|
|
}
|
|
/**
|
|
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
|
|
* Also checks r/w permissions.
|
|
*
|
|
* @returns step summary file path
|
|
*/
|
|
filePath() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (this._filePath) {
|
|
return this._filePath;
|
|
}
|
|
const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
|
|
if (!pathFromEnv) {
|
|
throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
|
|
}
|
|
try {
|
|
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
|
|
}
|
|
catch (_a) {
|
|
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
|
|
}
|
|
this._filePath = pathFromEnv;
|
|
return this._filePath;
|
|
});
|
|
}
|
|
/**
|
|
* Wraps content in an HTML tag, adding any HTML attributes
|
|
*
|
|
* @param {string} tag HTML tag to wrap
|
|
* @param {string | null} content content within the tag
|
|
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
|
|
*
|
|
* @returns {string} content wrapped in HTML element
|
|
*/
|
|
wrap(tag, content, attrs = {}) {
|
|
const htmlAttrs = Object.entries(attrs)
|
|
.map(([key, value]) => ` ${key}="${value}"`)
|
|
.join('');
|
|
if (!content) {
|
|
return `<${tag}${htmlAttrs}>`;
|
|
}
|
|
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
|
|
}
|
|
/**
|
|
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
|
|
*
|
|
* @param {SummaryWriteOptions} [options] (optional) options for write operation
|
|
*
|
|
* @returns {Promise<Summary>} summary instance
|
|
*/
|
|
write(options) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
|
|
const filePath = yield this.filePath();
|
|
const writeFunc = overwrite ? writeFile : appendFile;
|
|
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
|
|
return this.emptyBuffer();
|
|
});
|
|
}
|
|
/**
|
|
* Clears the summary buffer and wipes the summary file
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
clear() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.emptyBuffer().write({ overwrite: true });
|
|
});
|
|
}
|
|
/**
|
|
* Returns the current summary buffer as a string
|
|
*
|
|
* @returns {string} string of summary buffer
|
|
*/
|
|
stringify() {
|
|
return this._buffer;
|
|
}
|
|
/**
|
|
* If the summary buffer is empty
|
|
*
|
|
* @returns {boolen} true if the buffer is empty
|
|
*/
|
|
isEmptyBuffer() {
|
|
return this._buffer.length === 0;
|
|
}
|
|
/**
|
|
* Resets the summary buffer without writing to summary file
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
emptyBuffer() {
|
|
this._buffer = '';
|
|
return this;
|
|
}
|
|
/**
|
|
* Adds raw text to the summary buffer
|
|
*
|
|
* @param {string} text content to add
|
|
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addRaw(text, addEOL = false) {
|
|
this._buffer += text;
|
|
return addEOL ? this.addEOL() : this;
|
|
}
|
|
/**
|
|
* Adds the operating system-specific end-of-line marker to the buffer
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addEOL() {
|
|
return this.addRaw(os_1.EOL);
|
|
}
|
|
/**
|
|
* Adds an HTML codeblock to the summary buffer
|
|
*
|
|
* @param {string} code content to render within fenced code block
|
|
* @param {string} lang (optional) language to syntax highlight code
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addCodeBlock(code, lang) {
|
|
const attrs = Object.assign({}, (lang && { lang }));
|
|
const element = this.wrap('pre', this.wrap('code', code), attrs);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML list to the summary buffer
|
|
*
|
|
* @param {string[]} items list of items to render
|
|
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addList(items, ordered = false) {
|
|
const tag = ordered ? 'ol' : 'ul';
|
|
const listItems = items.map(item => this.wrap('li', item)).join('');
|
|
const element = this.wrap(tag, listItems);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML table to the summary buffer
|
|
*
|
|
* @param {SummaryTableCell[]} rows table rows
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addTable(rows) {
|
|
const tableBody = rows
|
|
.map(row => {
|
|
const cells = row
|
|
.map(cell => {
|
|
if (typeof cell === 'string') {
|
|
return this.wrap('td', cell);
|
|
}
|
|
const { header, data, colspan, rowspan } = cell;
|
|
const tag = header ? 'th' : 'td';
|
|
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
|
|
return this.wrap(tag, data, attrs);
|
|
})
|
|
.join('');
|
|
return this.wrap('tr', cells);
|
|
})
|
|
.join('');
|
|
const element = this.wrap('table', tableBody);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds a collapsable HTML details element to the summary buffer
|
|
*
|
|
* @param {string} label text for the closed state
|
|
* @param {string} content collapsable content
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addDetails(label, content) {
|
|
const element = this.wrap('details', this.wrap('summary', label) + content);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML image tag to the summary buffer
|
|
*
|
|
* @param {string} src path to the image you to embed
|
|
* @param {string} alt text description of the image
|
|
* @param {SummaryImageOptions} options (optional) addition image attributes
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addImage(src, alt, options) {
|
|
const { width, height } = options || {};
|
|
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
|
|
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML section heading element
|
|
*
|
|
* @param {string} text heading text
|
|
* @param {number | string} [level=1] (optional) the heading level, default: 1
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addHeading(text, level) {
|
|
const tag = `h${level}`;
|
|
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
|
|
? tag
|
|
: 'h1';
|
|
const element = this.wrap(allowedTag, text);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML thematic break (<hr>) to the summary buffer
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addSeparator() {
|
|
const element = this.wrap('hr', null);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML line break (<br>) to the summary buffer
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addBreak() {
|
|
const element = this.wrap('br', null);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML blockquote to the summary buffer
|
|
*
|
|
* @param {string} text quote text
|
|
* @param {string} cite (optional) citation url
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addQuote(text, cite) {
|
|
const attrs = Object.assign({}, (cite && { cite }));
|
|
const element = this.wrap('blockquote', text, attrs);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML anchor tag to the summary buffer
|
|
*
|
|
* @param {string} text link text/content
|
|
* @param {string} href hyperlink
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addLink(text, href) {
|
|
const element = this.wrap('a', text, { href });
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
}
|
|
const _summary = new Summary();
|
|
/**
|
|
* @deprecated use `core.summary`
|
|
*/
|
|
exports.markdownSummary = _summary;
|
|
exports.summary = _summary;
|
|
//# sourceMappingURL=summary.js.map
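// Illustrative usage (not part of the bundled source): an action step could build
// a job summary with the fluent API above, e.g. (hypothetical content)
//   await core.summary
//     .addHeading('SDK setup')
//     .addList(['platform-tools', 'build-tools;34.0.0'])
//     .write();
// which appends the rendered HTML to the file named by $GITHUB_STEP_SUMMARY.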
|
|
|
|
/***/ }),
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
/**
*
* @param annotationProperties
* @returns The command properties to send with the actual annotation command
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
*/
function toCommandProperties(annotationProperties) {
if (!Object.keys(annotationProperties).length) {
return {};
}
return {
title: annotationProperties.title,
file: annotationProperties.file,
line: annotationProperties.startLine,
endLine: annotationProperties.endLine,
col: annotationProperties.startColumn,
endColumn: annotationProperties.endColumn
};
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map

/***/ }),

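// Illustrative note (not part of the bundled source):
//   toCommandValue(null)          -> ''
//   toCommandValue('abc')         -> 'abc'
//   toCommandValue({ level: 2 })  -> '{"level":2}'
// toCommandProperties keeps title, file, endLine and endColumn as-is and renames
// startLine/startColumn to the line/col keys the runner expects.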
/***/ 8974:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
Object.defineProperty(exports, "v1", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "v3", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v2.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "v4", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v3.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "v5", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v4.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "NIL", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _nil.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "version", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _version.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "validate", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _validate.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "stringify", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _stringify.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "parse", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _parse.default;
|
|
}
|
|
}));
|
|
|
|
var _v = _interopRequireDefault(__nccwpck_require__(1595));
|
|
|
|
var _v2 = _interopRequireDefault(__nccwpck_require__(6993));
|
|
|
|
var _v3 = _interopRequireDefault(__nccwpck_require__(1472));
|
|
|
|
var _v4 = _interopRequireDefault(__nccwpck_require__(6217));
|
|
|
|
var _nil = _interopRequireDefault(__nccwpck_require__(2381));
|
|
|
|
var _version = _interopRequireDefault(__nccwpck_require__(427));
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
|
|
|
|
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5842:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function md5(bytes) {
|
|
if (Array.isArray(bytes)) {
|
|
bytes = Buffer.from(bytes);
|
|
} else if (typeof bytes === 'string') {
|
|
bytes = Buffer.from(bytes, 'utf8');
|
|
}
|
|
|
|
return _crypto.default.createHash('md5').update(bytes).digest();
|
|
}
|
|
|
|
var _default = md5;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2381:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
var _default = '00000000-0000-0000-0000-000000000000';
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6385:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function parse(uuid) {
|
|
if (!(0, _validate.default)(uuid)) {
|
|
throw TypeError('Invalid UUID');
|
|
}
|
|
|
|
let v;
|
|
const arr = new Uint8Array(16); // Parse ########-....-....-....-............
|
|
|
|
arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
|
|
arr[1] = v >>> 16 & 0xff;
|
|
arr[2] = v >>> 8 & 0xff;
|
|
arr[3] = v & 0xff; // Parse ........-####-....-....-............
|
|
|
|
arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
|
|
arr[5] = v & 0xff; // Parse ........-....-####-....-............
|
|
|
|
arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
|
|
arr[7] = v & 0xff; // Parse ........-....-....-####-............
|
|
|
|
arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
|
|
arr[9] = v & 0xff; // Parse ........-....-....-....-############
|
|
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
|
|
|
|
arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
|
|
arr[11] = v / 0x100000000 & 0xff;
|
|
arr[12] = v >>> 24 & 0xff;
|
|
arr[13] = v >>> 16 & 0xff;
|
|
arr[14] = v >>> 8 & 0xff;
|
|
arr[15] = v & 0xff;
|
|
return arr;
|
|
}
|
|
|
|
var _default = parse;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6230:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9784:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = rng;
|
|
|
|
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate
|
|
|
|
let poolPtr = rnds8Pool.length;
|
|
|
|
function rng() {
|
|
if (poolPtr > rnds8Pool.length - 16) {
|
|
_crypto.default.randomFillSync(rnds8Pool);
|
|
|
|
poolPtr = 0;
|
|
}
|
|
|
|
return rnds8Pool.slice(poolPtr, poolPtr += 16);
|
|
}
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8844:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function sha1(bytes) {
|
|
if (Array.isArray(bytes)) {
|
|
bytes = Buffer.from(bytes);
|
|
} else if (typeof bytes === 'string') {
|
|
bytes = Buffer.from(bytes, 'utf8');
|
|
}
|
|
|
|
return _crypto.default.createHash('sha1').update(bytes).digest();
|
|
}
|
|
|
|
var _default = sha1;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1458:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
/**
|
|
* Convert array of 16 byte values to UUID string format of the form:
|
|
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
|
*/
|
|
const byteToHex = [];
|
|
|
|
for (let i = 0; i < 256; ++i) {
|
|
byteToHex.push((i + 0x100).toString(16).substr(1));
|
|
}
|
|
|
|
function stringify(arr, offset = 0) {
|
|
// Note: Be careful editing this code! It's been tuned for performance
|
|
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
|
|
const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
|
|
// of the following:
|
|
// - One or more input array values don't map to a hex octet (leading to
|
|
// "undefined" in the uuid)
|
|
// - Invalid input values for the RFC `version` or `variant` fields
|
|
|
|
if (!(0, _validate.default)(uuid)) {
|
|
throw TypeError('Stringified UUID is invalid');
|
|
}
|
|
|
|
return uuid;
|
|
}
|
|
|
|
var _default = stringify;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1595:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _rng = _interopRequireDefault(__nccwpck_require__(9784));
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
// **`v1()` - Generate time-based UUID**
|
|
//
|
|
// Inspired by https://github.com/LiosK/UUID.js
|
|
// and http://docs.python.org/library/uuid.html
|
|
let _nodeId;
|
|
|
|
let _clockseq; // Previous uuid creation time
|
|
|
|
|
|
let _lastMSecs = 0;
|
|
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
|
|
|
|
function v1(options, buf, offset) {
|
|
let i = buf && offset || 0;
|
|
const b = buf || new Array(16);
|
|
options = options || {};
|
|
let node = options.node || _nodeId;
|
|
let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
|
|
// specified. We do this lazily to minimize issues related to insufficient
|
|
// system entropy. See #189
|
|
|
|
if (node == null || clockseq == null) {
|
|
const seedBytes = options.random || (options.rng || _rng.default)();
|
|
|
|
if (node == null) {
|
|
// Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
|
|
node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
|
|
}
|
|
|
|
if (clockseq == null) {
|
|
// Per 4.2.2, randomize (14 bit) clockseq
|
|
clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
|
|
}
|
|
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
|
|
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
|
|
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
|
|
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
|
|
|
|
|
|
let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
|
|
// cycle to simulate higher resolution clock
|
|
|
|
let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
|
|
|
|
const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
|
|
|
|
if (dt < 0 && options.clockseq === undefined) {
|
|
clockseq = clockseq + 1 & 0x3fff;
|
|
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
|
|
// time interval
|
|
|
|
|
|
if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
|
|
nsecs = 0;
|
|
} // Per 4.2.1.2 Throw error if too many uuids are requested
|
|
|
|
|
|
if (nsecs >= 10000) {
|
|
throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
|
|
}
|
|
|
|
_lastMSecs = msecs;
|
|
_lastNSecs = nsecs;
|
|
_clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
|
|
|
|
msecs += 12219292800000; // `time_low`
|
|
|
|
const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
|
|
b[i++] = tl >>> 24 & 0xff;
|
|
b[i++] = tl >>> 16 & 0xff;
|
|
b[i++] = tl >>> 8 & 0xff;
|
|
b[i++] = tl & 0xff; // `time_mid`
|
|
|
|
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
|
|
b[i++] = tmh >>> 8 & 0xff;
|
|
b[i++] = tmh & 0xff; // `time_high_and_version`
|
|
|
|
b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
|
|
|
|
b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
|
|
|
|
b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
|
|
|
|
b[i++] = clockseq & 0xff; // `node`
|
|
|
|
for (let n = 0; n < 6; ++n) {
|
|
b[i + n] = node[n];
|
|
}
|
|
|
|
return buf || (0, _stringify.default)(b);
|
|
}
|
|
|
|
var _default = v1;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6993:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _v = _interopRequireDefault(__nccwpck_require__(5920));
|
|
|
|
var _md = _interopRequireDefault(__nccwpck_require__(5842));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
const v3 = (0, _v.default)('v3', 0x30, _md.default);
|
|
var _default = v3;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5920:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = _default;
|
|
exports.URL = exports.DNS = void 0;
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
|
|
|
|
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function stringToBytes(str) {
|
|
str = unescape(encodeURIComponent(str)); // UTF8 escape
|
|
|
|
const bytes = [];
|
|
|
|
for (let i = 0; i < str.length; ++i) {
|
|
bytes.push(str.charCodeAt(i));
|
|
}
|
|
|
|
return bytes;
|
|
}
|
|
|
|
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
|
|
exports.DNS = DNS;
|
|
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
|
|
exports.URL = URL;
|
|
|
|
function _default(name, version, hashfunc) {
|
|
function generateUUID(value, namespace, buf, offset) {
|
|
if (typeof value === 'string') {
|
|
value = stringToBytes(value);
|
|
}
|
|
|
|
if (typeof namespace === 'string') {
|
|
namespace = (0, _parse.default)(namespace);
|
|
}
|
|
|
|
if (namespace.length !== 16) {
|
|
throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
|
|
} // Compute hash of namespace and value, Per 4.3
|
|
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
|
|
// hashfunc([...namespace, ... value])`
|
|
|
|
|
|
let bytes = new Uint8Array(16 + value.length);
|
|
bytes.set(namespace);
|
|
bytes.set(value, namespace.length);
|
|
bytes = hashfunc(bytes);
|
|
bytes[6] = bytes[6] & 0x0f | version;
|
|
bytes[8] = bytes[8] & 0x3f | 0x80;
|
|
|
|
if (buf) {
|
|
offset = offset || 0;
|
|
|
|
for (let i = 0; i < 16; ++i) {
|
|
buf[offset + i] = bytes[i];
|
|
}
|
|
|
|
return buf;
|
|
}
|
|
|
|
return (0, _stringify.default)(bytes);
|
|
} // Function#name is not settable on some platforms (#270)
|
|
|
|
|
|
try {
|
|
generateUUID.name = name; // eslint-disable-next-line no-empty
|
|
} catch (err) {} // For CommonJS default export support
|
|
|
|
|
|
generateUUID.DNS = DNS;
|
|
generateUUID.URL = URL;
|
|
return generateUUID;
|
|
}
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1472:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _rng = _interopRequireDefault(__nccwpck_require__(9784));
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function v4(options, buf, offset) {
|
|
options = options || {};
|
|
|
|
const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
|
|
|
|
|
|
rnds[6] = rnds[6] & 0x0f | 0x40;
|
|
rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
|
|
|
|
if (buf) {
|
|
offset = offset || 0;
|
|
|
|
for (let i = 0; i < 16; ++i) {
|
|
buf[offset + i] = rnds[i];
|
|
}
|
|
|
|
return buf;
|
|
}
|
|
|
|
return (0, _stringify.default)(rnds);
|
|
}
|
|
|
|
var _default = v4;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6217:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _v = _interopRequireDefault(__nccwpck_require__(5920));
|
|
|
|
var _sha = _interopRequireDefault(__nccwpck_require__(8844));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
const v5 = (0, _v.default)('v5', 0x50, _sha.default);
|
|
var _default = v5;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2609:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _regex = _interopRequireDefault(__nccwpck_require__(6230));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function validate(uuid) {
|
|
return typeof uuid === 'string' && _regex.default.test(uuid);
|
|
}
|
|
|
|
var _default = validate;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 427:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function version(uuid) {
|
|
if (!(0, _validate.default)(uuid)) {
|
|
throw TypeError('Invalid UUID');
|
|
}
|
|
|
|
return parseInt(uuid.substr(14, 1), 16);
|
|
}
|
|
|
|
var _default = version;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1514:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getExecOutput = exports.exec = void 0;
|
|
const string_decoder_1 = __nccwpck_require__(1576);
|
|
const tr = __importStar(__nccwpck_require__(8159));
|
|
/**
|
|
* Exec a command.
|
|
* Output will be streamed to the live console.
|
|
* Returns promise with return code
|
|
*
|
|
* @param commandLine command to execute (can include additional args). Must be correctly escaped.
|
|
* @param args optional arguments for tool. Escaping is handled by the lib.
|
|
* @param options optional exec options. See ExecOptions
|
|
* @returns Promise<number> exit code
|
|
*/
|
|
function exec(commandLine, args, options) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const commandArgs = tr.argStringToArray(commandLine);
|
|
if (commandArgs.length === 0) {
|
|
throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
|
|
}
|
|
// Path to tool to execute should be first arg
|
|
const toolPath = commandArgs[0];
|
|
args = commandArgs.slice(1).concat(args || []);
|
|
const runner = new tr.ToolRunner(toolPath, args, options);
|
|
return runner.exec();
|
|
});
|
|
}
|
|
exports.exec = exec;
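// Editor's note: a minimal usage sketch for exec(), kept as a comment so the bundle's
// runtime behavior is unchanged. The tool name, arguments, and cwd below are illustrative
// assumptions, not values used by this action.
//
//   const exitCode = await exec('git', ['status', '--porcelain'], { cwd: '/tmp' });
//   // exitCode is the numeric return code; output is streamed to the live console.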
/**
|
|
* Exec a command and get the output.
|
|
* Output will be streamed to the live console.
|
|
* Returns promise with the exit code and collected stdout and stderr
|
|
*
|
|
* @param commandLine command to execute (can include additional args). Must be correctly escaped.
|
|
* @param args optional arguments for tool. Escaping is handled by the lib.
|
|
* @param options optional exec options. See ExecOptions
|
|
* @returns Promise<ExecOutput> exit code, stdout, and stderr
|
|
*/
|
|
function getExecOutput(commandLine, args, options) {
|
|
var _a, _b;
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
let stdout = '';
|
|
let stderr = '';
|
|
        // Using string decoder covers the case where a multi-byte character is split
|
|
const stdoutDecoder = new string_decoder_1.StringDecoder('utf8');
|
|
const stderrDecoder = new string_decoder_1.StringDecoder('utf8');
|
|
const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout;
|
|
const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr;
|
|
const stdErrListener = (data) => {
|
|
stderr += stderrDecoder.write(data);
|
|
if (originalStdErrListener) {
|
|
originalStdErrListener(data);
|
|
}
|
|
};
|
|
const stdOutListener = (data) => {
|
|
stdout += stdoutDecoder.write(data);
|
|
if (originalStdoutListener) {
|
|
originalStdoutListener(data);
|
|
}
|
|
};
|
|
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
|
|
const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
|
|
//flush any remaining characters
|
|
stdout += stdoutDecoder.end();
|
|
stderr += stderrDecoder.end();
|
|
return {
|
|
exitCode,
|
|
stdout,
|
|
stderr
|
|
};
|
|
});
|
|
}
|
|
exports.getExecOutput = getExecOutput;
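// Editor's note: an illustrative, comment-only sketch of getExecOutput(); the command is an
// assumption, while the shape of the result comes from the implementation above.
//
//   const { exitCode, stdout, stderr } = await getExecOutput('node', ['--version']);
//   // stdout/stderr are fully buffered strings; split multi-byte characters are handled
//   // by the StringDecoder instances created above.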
//# sourceMappingURL=exec.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8159:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.argStringToArray = exports.ToolRunner = void 0;
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const events = __importStar(__nccwpck_require__(2361));
|
|
const child = __importStar(__nccwpck_require__(2081));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
const io = __importStar(__nccwpck_require__(7436));
|
|
const ioUtil = __importStar(__nccwpck_require__(1962));
|
|
const timers_1 = __nccwpck_require__(9512);
|
|
/* eslint-disable @typescript-eslint/unbound-method */
|
|
const IS_WINDOWS = process.platform === 'win32';
|
|
/*
|
|
* Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
|
|
*/
|
|
class ToolRunner extends events.EventEmitter {
|
|
constructor(toolPath, args, options) {
|
|
super();
|
|
if (!toolPath) {
|
|
throw new Error("Parameter 'toolPath' cannot be null or empty.");
|
|
}
|
|
this.toolPath = toolPath;
|
|
this.args = args || [];
|
|
this.options = options || {};
|
|
}
|
|
_debug(message) {
|
|
if (this.options.listeners && this.options.listeners.debug) {
|
|
this.options.listeners.debug(message);
|
|
}
|
|
}
|
|
_getCommandString(options, noPrefix) {
|
|
const toolPath = this._getSpawnFileName();
|
|
const args = this._getSpawnArgs(options);
|
|
let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
|
|
if (IS_WINDOWS) {
|
|
// Windows + cmd file
|
|
if (this._isCmdFile()) {
|
|
cmd += toolPath;
|
|
for (const a of args) {
|
|
cmd += ` ${a}`;
|
|
}
|
|
}
|
|
// Windows + verbatim
|
|
else if (options.windowsVerbatimArguments) {
|
|
cmd += `"${toolPath}"`;
|
|
for (const a of args) {
|
|
cmd += ` ${a}`;
|
|
}
|
|
}
|
|
// Windows (regular)
|
|
else {
|
|
cmd += this._windowsQuoteCmdArg(toolPath);
|
|
for (const a of args) {
|
|
cmd += ` ${this._windowsQuoteCmdArg(a)}`;
|
|
}
|
|
}
|
|
}
|
|
else {
|
|
// OSX/Linux - this can likely be improved with some form of quoting.
|
|
// creating processes on Unix is fundamentally different than Windows.
|
|
// on Unix, execvp() takes an arg array.
|
|
cmd += toolPath;
|
|
for (const a of args) {
|
|
cmd += ` ${a}`;
|
|
}
|
|
}
|
|
return cmd;
|
|
}
|
|
_processLineBuffer(data, strBuffer, onLine) {
|
|
try {
|
|
let s = strBuffer + data.toString();
|
|
let n = s.indexOf(os.EOL);
|
|
while (n > -1) {
|
|
const line = s.substring(0, n);
|
|
onLine(line);
|
|
// the rest of the string ...
|
|
s = s.substring(n + os.EOL.length);
|
|
n = s.indexOf(os.EOL);
|
|
}
|
|
return s;
|
|
}
|
|
catch (err) {
|
|
// streaming lines to console is best effort. Don't fail a build.
|
|
this._debug(`error processing line. Failed with error ${err}`);
|
|
return '';
|
|
}
|
|
}
|
|
_getSpawnFileName() {
|
|
if (IS_WINDOWS) {
|
|
if (this._isCmdFile()) {
|
|
return process.env['COMSPEC'] || 'cmd.exe';
|
|
}
|
|
}
|
|
return this.toolPath;
|
|
}
|
|
_getSpawnArgs(options) {
|
|
if (IS_WINDOWS) {
|
|
if (this._isCmdFile()) {
|
|
let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
|
|
for (const a of this.args) {
|
|
argline += ' ';
|
|
argline += options.windowsVerbatimArguments
|
|
? a
|
|
: this._windowsQuoteCmdArg(a);
|
|
}
|
|
argline += '"';
|
|
return [argline];
|
|
}
|
|
}
|
|
return this.args;
|
|
}
|
|
_endsWith(str, end) {
|
|
return str.endsWith(end);
|
|
}
|
|
_isCmdFile() {
|
|
const upperToolPath = this.toolPath.toUpperCase();
|
|
return (this._endsWith(upperToolPath, '.CMD') ||
|
|
this._endsWith(upperToolPath, '.BAT'));
|
|
}
|
|
_windowsQuoteCmdArg(arg) {
|
|
// for .exe, apply the normal quoting rules that libuv applies
|
|
if (!this._isCmdFile()) {
|
|
return this._uvQuoteCmdArg(arg);
|
|
}
|
|
// otherwise apply quoting rules specific to the cmd.exe command line parser.
|
|
// the libuv rules are generic and are not designed specifically for cmd.exe
|
|
// command line parser.
|
|
//
|
|
// for a detailed description of the cmd.exe command line parser, refer to
|
|
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
|
|
// need quotes for empty arg
|
|
if (!arg) {
|
|
return '""';
|
|
}
|
|
// determine whether the arg needs to be quoted
|
|
const cmdSpecialChars = [
|
|
' ',
|
|
'\t',
|
|
'&',
|
|
'(',
|
|
')',
|
|
'[',
|
|
']',
|
|
'{',
|
|
'}',
|
|
'^',
|
|
'=',
|
|
';',
|
|
'!',
|
|
"'",
|
|
'+',
|
|
',',
|
|
'`',
|
|
'~',
|
|
'|',
|
|
'<',
|
|
'>',
|
|
'"'
|
|
];
|
|
let needsQuotes = false;
|
|
for (const char of arg) {
|
|
if (cmdSpecialChars.some(x => x === char)) {
|
|
needsQuotes = true;
|
|
break;
|
|
}
|
|
}
|
|
// short-circuit if quotes not needed
|
|
if (!needsQuotes) {
|
|
return arg;
|
|
}
|
|
        // the following quoting rules are very similar to the rules that libuv applies.
|
|
//
|
|
// 1) wrap the string in quotes
|
|
//
|
|
// 2) double-up quotes - i.e. " => ""
|
|
//
|
|
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
|
|
// doesn't work well with a cmd.exe command line.
|
|
//
|
|
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
|
|
// for example, the command line:
|
|
// foo.exe "myarg:""my val"""
|
|
// is parsed by a .NET console app into an arg array:
|
|
// [ "myarg:\"my val\"" ]
|
|
// which is the same end result when applying libuv quoting rules. although the actual
|
|
// command line from libuv quoting rules would look like:
|
|
// foo.exe "myarg:\"my val\""
|
|
//
|
|
// 3) double-up slashes that precede a quote,
|
|
// e.g. hello \world => "hello \world"
|
|
// hello\"world => "hello\\""world"
|
|
// hello\\"world => "hello\\\\""world"
|
|
// hello world\ => "hello world\\"
|
|
//
|
|
// technically this is not required for a cmd.exe command line, or the batch argument parser.
|
|
// the reasons for including this as a .cmd quoting rule are:
|
|
//
|
|
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
|
|
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
|
|
//
|
|
// b) it's what we've been doing previously (by deferring to node default behavior) and we
|
|
// haven't heard any complaints about that aspect.
|
|
//
|
|
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
|
|
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
|
|
// by using %%.
|
|
//
|
|
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
|
|
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
|
|
//
|
|
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
|
|
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
|
|
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
|
|
// to an external program.
|
|
//
|
|
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
|
|
// % can be escaped within a .cmd file.
|
|
let reverse = '"';
|
|
let quoteHit = true;
|
|
for (let i = arg.length; i > 0; i--) {
|
|
// walk the string in reverse
|
|
reverse += arg[i - 1];
|
|
if (quoteHit && arg[i - 1] === '\\') {
|
|
reverse += '\\'; // double the slash
|
|
}
|
|
else if (arg[i - 1] === '"') {
|
|
quoteHit = true;
|
|
reverse += '"'; // double the quote
|
|
}
|
|
else {
|
|
quoteHit = false;
|
|
}
|
|
}
|
|
reverse += '"';
|
|
return reverse
|
|
.split('')
|
|
.reverse()
|
|
.join('');
|
|
}
|
|
_uvQuoteCmdArg(arg) {
|
|
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
|
|
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
|
|
// is used.
|
|
//
|
|
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
|
|
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
|
|
// pasting copyright notice from Node within this function:
|
|
//
|
|
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
|
//
|
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
// of this software and associated documentation files (the "Software"), to
|
|
// deal in the Software without restriction, including without limitation the
|
|
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
|
// sell copies of the Software, and to permit persons to whom the Software is
|
|
// furnished to do so, subject to the following conditions:
|
|
//
|
|
// The above copyright notice and this permission notice shall be included in
|
|
// all copies or substantial portions of the Software.
|
|
//
|
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
|
// IN THE SOFTWARE.
|
|
if (!arg) {
|
|
// Need double quotation for empty argument
|
|
return '""';
|
|
}
|
|
if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
|
|
// No quotation needed
|
|
return arg;
|
|
}
|
|
if (!arg.includes('"') && !arg.includes('\\')) {
|
|
// No embedded double quotes or backslashes, so I can just wrap
|
|
// quote marks around the whole thing.
|
|
return `"${arg}"`;
|
|
}
|
|
// Expected input/output:
|
|
// input : hello"world
|
|
// output: "hello\"world"
|
|
// input : hello""world
|
|
// output: "hello\"\"world"
|
|
// input : hello\world
|
|
// output: hello\world
|
|
// input : hello\\world
|
|
// output: hello\\world
|
|
// input : hello\"world
|
|
// output: "hello\\\"world"
|
|
// input : hello\\"world
|
|
// output: "hello\\\\\"world"
|
|
// input : hello world\
|
|
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
|
|
// but it appears the comment is wrong, it should be "hello world\\"
|
|
let reverse = '"';
|
|
let quoteHit = true;
|
|
for (let i = arg.length; i > 0; i--) {
|
|
// walk the string in reverse
|
|
reverse += arg[i - 1];
|
|
if (quoteHit && arg[i - 1] === '\\') {
|
|
reverse += '\\';
|
|
}
|
|
else if (arg[i - 1] === '"') {
|
|
quoteHit = true;
|
|
reverse += '\\';
|
|
}
|
|
else {
|
|
quoteHit = false;
|
|
}
|
|
}
|
|
reverse += '"';
|
|
return reverse
|
|
.split('')
|
|
.reverse()
|
|
.join('');
|
|
}
|
|
_cloneExecOptions(options) {
|
|
options = options || {};
|
|
const result = {
|
|
cwd: options.cwd || process.cwd(),
|
|
env: options.env || process.env,
|
|
silent: options.silent || false,
|
|
windowsVerbatimArguments: options.windowsVerbatimArguments || false,
|
|
failOnStdErr: options.failOnStdErr || false,
|
|
ignoreReturnCode: options.ignoreReturnCode || false,
|
|
delay: options.delay || 10000
|
|
};
|
|
result.outStream = options.outStream || process.stdout;
|
|
result.errStream = options.errStream || process.stderr;
|
|
return result;
|
|
}
|
|
_getSpawnOptions(options, toolPath) {
|
|
options = options || {};
|
|
const result = {};
|
|
result.cwd = options.cwd;
|
|
result.env = options.env;
|
|
result['windowsVerbatimArguments'] =
|
|
options.windowsVerbatimArguments || this._isCmdFile();
|
|
if (options.windowsVerbatimArguments) {
|
|
result.argv0 = `"${toolPath}"`;
|
|
}
|
|
return result;
|
|
}
|
|
/**
|
|
* Exec a tool.
|
|
* Output will be streamed to the live console.
|
|
* Returns promise with return code
|
|
*
|
|
     * @returns Promise<number> exit code
|
|
*/
|
|
exec() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
// root the tool path if it is unrooted and contains relative pathing
|
|
if (!ioUtil.isRooted(this.toolPath) &&
|
|
(this.toolPath.includes('/') ||
|
|
(IS_WINDOWS && this.toolPath.includes('\\')))) {
|
|
// prefer options.cwd if it is specified, however options.cwd may also need to be rooted
|
|
this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
|
|
}
|
|
// if the tool is only a file name, then resolve it from the PATH
|
|
// otherwise verify it exists (add extension on Windows if necessary)
|
|
this.toolPath = yield io.which(this.toolPath, true);
|
|
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
|
|
this._debug(`exec tool: ${this.toolPath}`);
|
|
this._debug('arguments:');
|
|
for (const arg of this.args) {
|
|
this._debug(` ${arg}`);
|
|
}
|
|
const optionsNonNull = this._cloneExecOptions(this.options);
|
|
if (!optionsNonNull.silent && optionsNonNull.outStream) {
|
|
optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
|
|
}
|
|
const state = new ExecState(optionsNonNull, this.toolPath);
|
|
state.on('debug', (message) => {
|
|
this._debug(message);
|
|
});
|
|
if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) {
|
|
return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`));
|
|
}
|
|
const fileName = this._getSpawnFileName();
|
|
const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
|
|
let stdbuffer = '';
|
|
if (cp.stdout) {
|
|
cp.stdout.on('data', (data) => {
|
|
if (this.options.listeners && this.options.listeners.stdout) {
|
|
this.options.listeners.stdout(data);
|
|
}
|
|
if (!optionsNonNull.silent && optionsNonNull.outStream) {
|
|
optionsNonNull.outStream.write(data);
|
|
}
|
|
stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => {
|
|
if (this.options.listeners && this.options.listeners.stdline) {
|
|
this.options.listeners.stdline(line);
|
|
}
|
|
});
|
|
});
|
|
}
|
|
let errbuffer = '';
|
|
if (cp.stderr) {
|
|
cp.stderr.on('data', (data) => {
|
|
state.processStderr = true;
|
|
if (this.options.listeners && this.options.listeners.stderr) {
|
|
this.options.listeners.stderr(data);
|
|
}
|
|
if (!optionsNonNull.silent &&
|
|
optionsNonNull.errStream &&
|
|
optionsNonNull.outStream) {
|
|
const s = optionsNonNull.failOnStdErr
|
|
? optionsNonNull.errStream
|
|
: optionsNonNull.outStream;
|
|
s.write(data);
|
|
}
|
|
errbuffer = this._processLineBuffer(data, errbuffer, (line) => {
|
|
if (this.options.listeners && this.options.listeners.errline) {
|
|
this.options.listeners.errline(line);
|
|
}
|
|
});
|
|
});
|
|
}
|
|
cp.on('error', (err) => {
|
|
state.processError = err.message;
|
|
state.processExited = true;
|
|
state.processClosed = true;
|
|
state.CheckComplete();
|
|
});
|
|
cp.on('exit', (code) => {
|
|
state.processExitCode = code;
|
|
state.processExited = true;
|
|
this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
|
|
state.CheckComplete();
|
|
});
|
|
cp.on('close', (code) => {
|
|
state.processExitCode = code;
|
|
state.processExited = true;
|
|
state.processClosed = true;
|
|
this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
|
|
state.CheckComplete();
|
|
});
|
|
state.on('done', (error, exitCode) => {
|
|
if (stdbuffer.length > 0) {
|
|
this.emit('stdline', stdbuffer);
|
|
}
|
|
if (errbuffer.length > 0) {
|
|
this.emit('errline', errbuffer);
|
|
}
|
|
cp.removeAllListeners();
|
|
if (error) {
|
|
reject(error);
|
|
}
|
|
else {
|
|
resolve(exitCode);
|
|
}
|
|
});
|
|
if (this.options.input) {
|
|
if (!cp.stdin) {
|
|
throw new Error('child process missing stdin');
|
|
}
|
|
cp.stdin.end(this.options.input);
|
|
}
|
|
}));
|
|
});
|
|
}
|
|
}
|
|
exports.ToolRunner = ToolRunner;
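// Editor's note: a hedged, comment-only sketch of driving ToolRunner directly. The tool path
// and listener are illustrative assumptions; exec() above is the usual entry point.
//
//   const runner = new ToolRunner('/usr/bin/ls', ['-la'], {
//     listeners: { stdline: line => console.log(`line: ${line}`) }
//   });
//   const code = await runner.exec(); // resolves with the tool's exit code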
|
|
/**
|
|
* Convert an arg string to an array of args. Handles escaping
|
|
*
|
|
* @param argString string of arguments
|
|
* @returns string[] array of arguments
|
|
*/
|
|
function argStringToArray(argString) {
|
|
const args = [];
|
|
let inQuotes = false;
|
|
let escaped = false;
|
|
let arg = '';
|
|
function append(c) {
|
|
// we only escape double quotes.
|
|
if (escaped && c !== '"') {
|
|
arg += '\\';
|
|
}
|
|
arg += c;
|
|
escaped = false;
|
|
}
|
|
for (let i = 0; i < argString.length; i++) {
|
|
const c = argString.charAt(i);
|
|
if (c === '"') {
|
|
if (!escaped) {
|
|
inQuotes = !inQuotes;
|
|
}
|
|
else {
|
|
append(c);
|
|
}
|
|
continue;
|
|
}
|
|
if (c === '\\' && escaped) {
|
|
append(c);
|
|
continue;
|
|
}
|
|
if (c === '\\' && inQuotes) {
|
|
escaped = true;
|
|
continue;
|
|
}
|
|
if (c === ' ' && !inQuotes) {
|
|
if (arg.length > 0) {
|
|
args.push(arg);
|
|
arg = '';
|
|
}
|
|
continue;
|
|
}
|
|
append(c);
|
|
}
|
|
if (arg.length > 0) {
|
|
args.push(arg.trim());
|
|
}
|
|
return args;
|
|
}
|
|
exports.argStringToArray = argStringToArray;
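// Editor's note: example inputs/outputs for argStringToArray(), comment only, derived from
// the parsing rules above (double quotes group, repeated spaces are collapsed).
//
//   argStringToArray('foo bar')        // => ['foo', 'bar']
//   argStringToArray('foo  bar')       // => ['foo', 'bar']
//   argStringToArray('foo "bar baz"')  // => ['foo', 'bar baz']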
class ExecState extends events.EventEmitter {
|
|
constructor(options, toolPath) {
|
|
super();
|
|
this.processClosed = false; // tracks whether the process has exited and stdio is closed
|
|
this.processError = '';
|
|
this.processExitCode = 0;
|
|
this.processExited = false; // tracks whether the process has exited
|
|
this.processStderr = false; // tracks whether stderr was written to
|
|
this.delay = 10000; // 10 seconds
|
|
this.done = false;
|
|
this.timeout = null;
|
|
if (!toolPath) {
|
|
throw new Error('toolPath must not be empty');
|
|
}
|
|
this.options = options;
|
|
this.toolPath = toolPath;
|
|
if (options.delay) {
|
|
this.delay = options.delay;
|
|
}
|
|
}
|
|
CheckComplete() {
|
|
if (this.done) {
|
|
return;
|
|
}
|
|
if (this.processClosed) {
|
|
this._setResult();
|
|
}
|
|
else if (this.processExited) {
|
|
this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this);
|
|
}
|
|
}
|
|
_debug(message) {
|
|
this.emit('debug', message);
|
|
}
|
|
_setResult() {
|
|
// determine whether there is an error
|
|
let error;
|
|
if (this.processExited) {
|
|
if (this.processError) {
|
|
error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
|
|
}
|
|
else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
|
|
error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
|
|
}
|
|
else if (this.processStderr && this.options.failOnStdErr) {
|
|
error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
|
|
}
|
|
}
|
|
// clear the timeout
|
|
if (this.timeout) {
|
|
clearTimeout(this.timeout);
|
|
this.timeout = null;
|
|
}
|
|
this.done = true;
|
|
this.emit('done', error, this.processExitCode);
|
|
}
|
|
static HandleTimeout(state) {
|
|
if (state.done) {
|
|
return;
|
|
}
|
|
if (!state.processClosed && state.processExited) {
|
|
const message = `The STDIO streams did not close within ${state.delay /
|
|
1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
|
|
state._debug(message);
|
|
}
|
|
state._setResult();
|
|
}
|
|
}
|
|
//# sourceMappingURL=toolrunner.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5526:
|
|
/***/ (function(__unused_webpack_module, exports) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
|
|
class BasicCredentialHandler {
|
|
constructor(username, password) {
|
|
this.username = username;
|
|
this.password = password;
|
|
}
|
|
prepareRequest(options) {
|
|
if (!options.headers) {
|
|
throw Error('The request has no headers');
|
|
}
|
|
options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
|
|
}
|
|
// This handler cannot handle 401
|
|
canHandleAuthentication() {
|
|
return false;
|
|
}
|
|
handleAuthentication() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
throw new Error('not implemented');
|
|
});
|
|
}
|
|
}
|
|
exports.BasicCredentialHandler = BasicCredentialHandler;
|
|
class BearerCredentialHandler {
|
|
constructor(token) {
|
|
this.token = token;
|
|
}
|
|
// currently implements pre-authorization
|
|
// TODO: support preAuth = false where it hooks on 401
|
|
prepareRequest(options) {
|
|
if (!options.headers) {
|
|
throw Error('The request has no headers');
|
|
}
|
|
options.headers['Authorization'] = `Bearer ${this.token}`;
|
|
}
|
|
// This handler cannot handle 401
|
|
canHandleAuthentication() {
|
|
return false;
|
|
}
|
|
handleAuthentication() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
throw new Error('not implemented');
|
|
});
|
|
}
|
|
}
|
|
exports.BearerCredentialHandler = BearerCredentialHandler;
|
|
class PersonalAccessTokenCredentialHandler {
|
|
constructor(token) {
|
|
this.token = token;
|
|
}
|
|
// currently implements pre-authorization
|
|
// TODO: support preAuth = false where it hooks on 401
|
|
prepareRequest(options) {
|
|
if (!options.headers) {
|
|
throw Error('The request has no headers');
|
|
}
|
|
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
|
|
}
|
|
// This handler cannot handle 401
|
|
canHandleAuthentication() {
|
|
return false;
|
|
}
|
|
handleAuthentication() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
throw new Error('not implemented');
|
|
});
|
|
}
|
|
}
|
|
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
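// Editor's note: a hedged, comment-only sketch of attaching one of these handlers to the
// HttpClient defined later in this bundle; the user agent and token are placeholders.
//
//   const client = new HttpClient('my-user-agent', [new BearerCredentialHandler('<token>')]);
//   // Each handler's prepareRequest() adds the Authorization header before a request is sent.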
|
|
//# sourceMappingURL=auth.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6255:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
|
|
const http = __importStar(__nccwpck_require__(3685));
|
|
const https = __importStar(__nccwpck_require__(5687));
|
|
const pm = __importStar(__nccwpck_require__(9835));
|
|
const tunnel = __importStar(__nccwpck_require__(4294));
|
|
var HttpCodes;
|
|
(function (HttpCodes) {
|
|
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
|
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
|
|
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
|
|
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
|
|
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
|
|
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
|
|
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
|
|
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
|
|
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
|
|
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
|
|
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
|
|
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
|
|
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
|
|
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
|
|
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
|
|
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
|
|
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
|
|
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
|
|
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
|
|
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
|
|
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
|
|
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
|
|
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
|
|
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
|
|
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
|
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
|
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
|
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
|
var Headers;
|
|
(function (Headers) {
|
|
Headers["Accept"] = "accept";
|
|
Headers["ContentType"] = "content-type";
|
|
})(Headers = exports.Headers || (exports.Headers = {}));
|
|
var MediaTypes;
|
|
(function (MediaTypes) {
|
|
MediaTypes["ApplicationJson"] = "application/json";
|
|
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
|
/**
|
|
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
|
*/
|
|
function getProxyUrl(serverUrl) {
|
|
const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
|
return proxyUrl ? proxyUrl.href : '';
|
|
}
|
|
exports.getProxyUrl = getProxyUrl;
|
|
const HttpRedirectCodes = [
|
|
HttpCodes.MovedPermanently,
|
|
HttpCodes.ResourceMoved,
|
|
HttpCodes.SeeOther,
|
|
HttpCodes.TemporaryRedirect,
|
|
HttpCodes.PermanentRedirect
|
|
];
|
|
const HttpResponseRetryCodes = [
|
|
HttpCodes.BadGateway,
|
|
HttpCodes.ServiceUnavailable,
|
|
HttpCodes.GatewayTimeout
|
|
];
|
|
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
|
|
const ExponentialBackoffCeiling = 10;
|
|
const ExponentialBackoffTimeSlice = 5;
|
|
class HttpClientError extends Error {
|
|
constructor(message, statusCode) {
|
|
super(message);
|
|
this.name = 'HttpClientError';
|
|
this.statusCode = statusCode;
|
|
Object.setPrototypeOf(this, HttpClientError.prototype);
|
|
}
|
|
}
|
|
exports.HttpClientError = HttpClientError;
|
|
class HttpClientResponse {
|
|
constructor(message) {
|
|
this.message = message;
|
|
}
|
|
readBody() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
|
|
let output = Buffer.alloc(0);
|
|
this.message.on('data', (chunk) => {
|
|
output = Buffer.concat([output, chunk]);
|
|
});
|
|
this.message.on('end', () => {
|
|
resolve(output.toString());
|
|
});
|
|
}));
|
|
});
|
|
}
|
|
}
|
|
exports.HttpClientResponse = HttpClientResponse;
|
|
function isHttps(requestUrl) {
|
|
const parsedUrl = new URL(requestUrl);
|
|
return parsedUrl.protocol === 'https:';
|
|
}
|
|
exports.isHttps = isHttps;
|
|
class HttpClient {
|
|
constructor(userAgent, handlers, requestOptions) {
|
|
this._ignoreSslError = false;
|
|
this._allowRedirects = true;
|
|
this._allowRedirectDowngrade = false;
|
|
this._maxRedirects = 50;
|
|
this._allowRetries = false;
|
|
this._maxRetries = 1;
|
|
this._keepAlive = false;
|
|
this._disposed = false;
|
|
this.userAgent = userAgent;
|
|
this.handlers = handlers || [];
|
|
this.requestOptions = requestOptions;
|
|
if (requestOptions) {
|
|
if (requestOptions.ignoreSslError != null) {
|
|
this._ignoreSslError = requestOptions.ignoreSslError;
|
|
}
|
|
this._socketTimeout = requestOptions.socketTimeout;
|
|
if (requestOptions.allowRedirects != null) {
|
|
this._allowRedirects = requestOptions.allowRedirects;
|
|
}
|
|
if (requestOptions.allowRedirectDowngrade != null) {
|
|
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
|
|
}
|
|
if (requestOptions.maxRedirects != null) {
|
|
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
|
|
}
|
|
if (requestOptions.keepAlive != null) {
|
|
this._keepAlive = requestOptions.keepAlive;
|
|
}
|
|
if (requestOptions.allowRetries != null) {
|
|
this._allowRetries = requestOptions.allowRetries;
|
|
}
|
|
if (requestOptions.maxRetries != null) {
|
|
this._maxRetries = requestOptions.maxRetries;
|
|
}
|
|
}
|
|
}
|
|
options(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
get(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('GET', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
del(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
post(requestUrl, data, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('POST', requestUrl, data, additionalHeaders || {});
|
|
});
|
|
}
|
|
patch(requestUrl, data, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
|
|
});
|
|
}
|
|
put(requestUrl, data, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('PUT', requestUrl, data, additionalHeaders || {});
|
|
});
|
|
}
|
|
head(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
sendStream(verb, requestUrl, stream, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request(verb, requestUrl, stream, additionalHeaders);
|
|
});
|
|
}
|
|
/**
|
|
* Gets a typed object from an endpoint
|
|
     * Be aware that a 404 (not found) resolves with a null result. Other errors (4xx, 5xx) reject the promise
|
|
*/
|
|
getJson(requestUrl, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
const res = yield this.get(requestUrl, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
postJson(requestUrl, obj, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
const res = yield this.post(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
putJson(requestUrl, obj, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
const res = yield this.put(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
patchJson(requestUrl, obj, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
const res = yield this.patch(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
/**
|
|
* Makes a raw http request.
|
|
* All other methods such as get, post, patch, and request ultimately call this.
|
|
* Prefer get, del, post and patch
|
|
*/
|
|
request(verb, requestUrl, data, headers) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (this._disposed) {
|
|
throw new Error('Client has already been disposed.');
|
|
}
|
|
const parsedUrl = new URL(requestUrl);
|
|
let info = this._prepareRequest(verb, parsedUrl, headers);
|
|
// Only perform retries on reads since writes may not be idempotent.
|
|
const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
|
|
? this._maxRetries + 1
|
|
: 1;
|
|
let numTries = 0;
|
|
let response;
|
|
do {
|
|
response = yield this.requestRaw(info, data);
|
|
// Check if it's an authentication challenge
|
|
if (response &&
|
|
response.message &&
|
|
response.message.statusCode === HttpCodes.Unauthorized) {
|
|
let authenticationHandler;
|
|
for (const handler of this.handlers) {
|
|
if (handler.canHandleAuthentication(response)) {
|
|
authenticationHandler = handler;
|
|
break;
|
|
}
|
|
}
|
|
if (authenticationHandler) {
|
|
return authenticationHandler.handleAuthentication(this, info, data);
|
|
}
|
|
else {
|
|
// We have received an unauthorized response but have no handlers to handle it.
|
|
// Let the response return to the caller.
|
|
return response;
|
|
}
|
|
}
|
|
let redirectsRemaining = this._maxRedirects;
|
|
while (response.message.statusCode &&
|
|
HttpRedirectCodes.includes(response.message.statusCode) &&
|
|
this._allowRedirects &&
|
|
redirectsRemaining > 0) {
|
|
const redirectUrl = response.message.headers['location'];
|
|
if (!redirectUrl) {
|
|
// if there's no location to redirect to, we won't
|
|
break;
|
|
}
|
|
const parsedRedirectUrl = new URL(redirectUrl);
|
|
if (parsedUrl.protocol === 'https:' &&
|
|
parsedUrl.protocol !== parsedRedirectUrl.protocol &&
|
|
!this._allowRedirectDowngrade) {
|
|
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
|
|
}
|
|
                // we need to finish reading the response before reassigning it,
                // otherwise the open socket would leak.
|
|
yield response.readBody();
|
|
// strip authorization header if redirected to a different hostname
|
|
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
|
|
for (const header in headers) {
|
|
// header names are case insensitive
|
|
if (header.toLowerCase() === 'authorization') {
|
|
delete headers[header];
|
|
}
|
|
}
|
|
}
|
|
// let's make the request with the new redirectUrl
|
|
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
|
|
response = yield this.requestRaw(info, data);
|
|
redirectsRemaining--;
|
|
}
|
|
if (!response.message.statusCode ||
|
|
!HttpResponseRetryCodes.includes(response.message.statusCode)) {
|
|
// If not a retry code, return immediately instead of retrying
|
|
return response;
|
|
}
|
|
numTries += 1;
|
|
if (numTries < maxTries) {
|
|
yield response.readBody();
|
|
yield this._performExponentialBackoff(numTries);
|
|
}
|
|
} while (numTries < maxTries);
|
|
return response;
|
|
});
|
|
}
|
|
/**
|
|
* Needs to be called if keepAlive is set to true in request options.
|
|
*/
|
|
dispose() {
|
|
if (this._agent) {
|
|
this._agent.destroy();
|
|
}
|
|
this._disposed = true;
|
|
}
|
|
/**
|
|
* Raw request.
|
|
* @param info
|
|
* @param data
|
|
*/
|
|
requestRaw(info, data) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise((resolve, reject) => {
|
|
function callbackForResult(err, res) {
|
|
if (err) {
|
|
reject(err);
|
|
}
|
|
else if (!res) {
|
|
// If `err` is not passed, then `res` must be passed.
|
|
reject(new Error('Unknown error'));
|
|
}
|
|
else {
|
|
resolve(res);
|
|
}
|
|
}
|
|
this.requestRawWithCallback(info, data, callbackForResult);
|
|
});
|
|
});
|
|
}
|
|
/**
|
|
* Raw request with callback.
|
|
* @param info
|
|
* @param data
|
|
* @param onResult
|
|
*/
|
|
requestRawWithCallback(info, data, onResult) {
|
|
if (typeof data === 'string') {
|
|
if (!info.options.headers) {
|
|
info.options.headers = {};
|
|
}
|
|
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
|
|
}
|
|
let callbackCalled = false;
|
|
function handleResult(err, res) {
|
|
if (!callbackCalled) {
|
|
callbackCalled = true;
|
|
onResult(err, res);
|
|
}
|
|
}
|
|
const req = info.httpModule.request(info.options, (msg) => {
|
|
const res = new HttpClientResponse(msg);
|
|
handleResult(undefined, res);
|
|
});
|
|
let socket;
|
|
req.on('socket', sock => {
|
|
socket = sock;
|
|
});
|
|
// If we ever get disconnected, we want the socket to timeout eventually
|
|
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
|
|
if (socket) {
|
|
socket.end();
|
|
}
|
|
handleResult(new Error(`Request timeout: ${info.options.path}`));
|
|
});
|
|
req.on('error', function (err) {
|
|
// err has statusCode property
|
|
// res should have headers
|
|
handleResult(err);
|
|
});
|
|
if (data && typeof data === 'string') {
|
|
req.write(data, 'utf8');
|
|
}
|
|
if (data && typeof data !== 'string') {
|
|
data.on('close', function () {
|
|
req.end();
|
|
});
|
|
data.pipe(req);
|
|
}
|
|
else {
|
|
req.end();
|
|
}
|
|
}
|
|
/**
|
|
* Gets an http agent. This function is useful when you need an http agent that handles
|
|
* routing through a proxy server - depending upon the url and proxy environment variables.
|
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
|
*/
|
|
getAgent(serverUrl) {
|
|
const parsedUrl = new URL(serverUrl);
|
|
return this._getAgent(parsedUrl);
|
|
}
|
|
_prepareRequest(method, requestUrl, headers) {
|
|
const info = {};
|
|
info.parsedUrl = requestUrl;
|
|
const usingSsl = info.parsedUrl.protocol === 'https:';
|
|
info.httpModule = usingSsl ? https : http;
|
|
const defaultPort = usingSsl ? 443 : 80;
|
|
info.options = {};
|
|
info.options.host = info.parsedUrl.hostname;
|
|
info.options.port = info.parsedUrl.port
|
|
? parseInt(info.parsedUrl.port)
|
|
: defaultPort;
|
|
info.options.path =
|
|
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
|
info.options.method = method;
|
|
info.options.headers = this._mergeHeaders(headers);
|
|
if (this.userAgent != null) {
|
|
info.options.headers['user-agent'] = this.userAgent;
|
|
}
|
|
info.options.agent = this._getAgent(info.parsedUrl);
|
|
// gives handlers an opportunity to participate
|
|
if (this.handlers) {
|
|
for (const handler of this.handlers) {
|
|
handler.prepareRequest(info.options);
|
|
}
|
|
}
|
|
return info;
|
|
}
|
|
_mergeHeaders(headers) {
|
|
if (this.requestOptions && this.requestOptions.headers) {
|
|
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
|
|
}
|
|
return lowercaseKeys(headers || {});
|
|
}
|
|
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
|
let clientHeader;
|
|
if (this.requestOptions && this.requestOptions.headers) {
|
|
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
|
}
|
|
return additionalHeaders[header] || clientHeader || _default;
|
|
}
|
|
_getAgent(parsedUrl) {
|
|
let agent;
|
|
const proxyUrl = pm.getProxyUrl(parsedUrl);
|
|
const useProxy = proxyUrl && proxyUrl.hostname;
|
|
if (this._keepAlive && useProxy) {
|
|
agent = this._proxyAgent;
|
|
}
|
|
if (this._keepAlive && !useProxy) {
|
|
agent = this._agent;
|
|
}
|
|
// if agent is already assigned use that agent.
|
|
if (agent) {
|
|
return agent;
|
|
}
|
|
const usingSsl = parsedUrl.protocol === 'https:';
|
|
let maxSockets = 100;
|
|
if (this.requestOptions) {
|
|
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
|
|
}
|
|
        // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
|
|
if (proxyUrl && proxyUrl.hostname) {
|
|
const agentOptions = {
|
|
maxSockets,
|
|
keepAlive: this._keepAlive,
|
|
proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
|
|
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
|
|
})), { host: proxyUrl.hostname, port: proxyUrl.port })
|
|
};
|
|
let tunnelAgent;
|
|
const overHttps = proxyUrl.protocol === 'https:';
|
|
if (usingSsl) {
|
|
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
|
|
}
|
|
else {
|
|
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
|
|
}
|
|
agent = tunnelAgent(agentOptions);
|
|
this._proxyAgent = agent;
|
|
}
|
|
        // if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
|
|
if (this._keepAlive && !agent) {
|
|
const options = { keepAlive: this._keepAlive, maxSockets };
|
|
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
|
|
this._agent = agent;
|
|
}
|
|
        // if not using a private agent and a tunnel agent isn't set up, use the global agent
|
|
if (!agent) {
|
|
agent = usingSsl ? https.globalAgent : http.globalAgent;
|
|
}
|
|
if (usingSsl && this._ignoreSslError) {
|
|
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
|
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
|
// we have to cast it to any and change it directly
|
|
agent.options = Object.assign(agent.options || {}, {
|
|
rejectUnauthorized: false
|
|
});
|
|
}
|
|
return agent;
|
|
}
|
|
_performExponentialBackoff(retryNumber) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
|
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
|
|
return new Promise(resolve => setTimeout(() => resolve(), ms));
|
|
});
|
|
}
|
|
_processResponse(res, options) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
|
|
const statusCode = res.message.statusCode || 0;
|
|
const response = {
|
|
statusCode,
|
|
result: null,
|
|
headers: {}
|
|
};
|
|
// not found leads to null obj returned
|
|
if (statusCode === HttpCodes.NotFound) {
|
|
resolve(response);
|
|
}
|
|
// get the result from the body
|
|
function dateTimeDeserializer(key, value) {
|
|
if (typeof value === 'string') {
|
|
const a = new Date(value);
|
|
if (!isNaN(a.valueOf())) {
|
|
return a;
|
|
}
|
|
}
|
|
return value;
|
|
}
|
|
let obj;
|
|
let contents;
|
|
try {
|
|
contents = yield res.readBody();
|
|
if (contents && contents.length > 0) {
|
|
if (options && options.deserializeDates) {
|
|
obj = JSON.parse(contents, dateTimeDeserializer);
|
|
}
|
|
else {
|
|
obj = JSON.parse(contents);
|
|
}
|
|
response.result = obj;
|
|
}
|
|
response.headers = res.message.headers;
|
|
}
|
|
catch (err) {
|
|
// Invalid resource (contents not json); leaving result obj null
|
|
}
|
|
// note that 3xx redirects are handled by the http layer.
|
|
if (statusCode > 299) {
|
|
let msg;
|
|
// if exception/error in body, attempt to get better error
|
|
if (obj && obj.message) {
|
|
msg = obj.message;
|
|
}
|
|
else if (contents && contents.length > 0) {
|
|
                    // it may be the case that the exception is in the body message as a string
|
|
msg = contents;
|
|
}
|
|
else {
|
|
msg = `Failed request: (${statusCode})`;
|
|
}
|
|
const err = new HttpClientError(msg, statusCode);
|
|
err.result = response.result;
|
|
reject(err);
|
|
}
|
|
else {
|
|
resolve(response);
|
|
}
|
|
}));
|
|
});
|
|
}
|
|
}
|
|
exports.HttpClient = HttpClient;
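// Editor's note: a minimal, comment-only sketch of the JSON helpers above; the URL is an
// assumption, the result shape comes from _processResponse().
//
//   const http = new HttpClient('my-user-agent');
//   const res = await http.getJson('https://api.github.com/repos/android-actions/setup-android');
//   // res => { statusCode, result, headers }; a 404 resolves with result === null,
//   // other 4xx/5xx responses reject with an HttpClientError.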
|
|
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9835:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.checkBypass = exports.getProxyUrl = void 0;
|
|
function getProxyUrl(reqUrl) {
|
|
const usingSsl = reqUrl.protocol === 'https:';
|
|
if (checkBypass(reqUrl)) {
|
|
return undefined;
|
|
}
|
|
const proxyVar = (() => {
|
|
if (usingSsl) {
|
|
return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
|
|
}
|
|
else {
|
|
return process.env['http_proxy'] || process.env['HTTP_PROXY'];
|
|
}
|
|
})();
|
|
if (proxyVar) {
|
|
return new URL(proxyVar);
|
|
}
|
|
else {
|
|
return undefined;
|
|
}
|
|
}
|
|
exports.getProxyUrl = getProxyUrl;
|
|
function checkBypass(reqUrl) {
|
|
if (!reqUrl.hostname) {
|
|
return false;
|
|
}
|
|
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
|
if (!noProxy) {
|
|
return false;
|
|
}
|
|
// Determine the request port
|
|
let reqPort;
|
|
if (reqUrl.port) {
|
|
reqPort = Number(reqUrl.port);
|
|
}
|
|
else if (reqUrl.protocol === 'http:') {
|
|
reqPort = 80;
|
|
}
|
|
else if (reqUrl.protocol === 'https:') {
|
|
reqPort = 443;
|
|
}
|
|
// Format the request hostname and hostname with port
|
|
const upperReqHosts = [reqUrl.hostname.toUpperCase()];
|
|
if (typeof reqPort === 'number') {
|
|
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
|
|
}
|
|
// Compare request host against noproxy
|
|
for (const upperNoProxyItem of noProxy
|
|
.split(',')
|
|
.map(x => x.trim().toUpperCase())
|
|
.filter(x => x)) {
|
|
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
|
return true;
|
|
}
|
|
}
|
|
return false;
|
|
}
|
|
exports.checkBypass = checkBypass;
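// Editor's note: an illustrative, comment-only sketch of how these proxy helpers react to
// environment variables; the values shown are assumptions.
//
//   process.env['https_proxy'] = 'http://127.0.0.1:8080';
//   process.env['no_proxy'] = 'example.com';
//   getProxyUrl(new URL('https://example.com'));    // => undefined (bypassed via no_proxy)
//   getProxyUrl(new URL('https://api.github.com')); // => URL('http://127.0.0.1:8080/')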
|
|
//# sourceMappingURL=proxy.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1962:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
var _a;
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rename = exports.readlink = exports.readdir = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
|
|
const fs = __importStar(__nccwpck_require__(7147));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
|
|
exports.IS_WINDOWS = process.platform === 'win32';
|
|
function exists(fsPath) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
try {
|
|
yield exports.stat(fsPath);
|
|
}
|
|
catch (err) {
|
|
if (err.code === 'ENOENT') {
|
|
return false;
|
|
}
|
|
throw err;
|
|
}
|
|
return true;
|
|
});
|
|
}
|
|
exports.exists = exists;
|
|
function isDirectory(fsPath, useStat = false) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
|
|
return stats.isDirectory();
|
|
});
|
|
}
|
|
exports.isDirectory = isDirectory;
|
|
/**
|
|
* On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
|
|
* \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
|
|
*/
|
|
function isRooted(p) {
|
|
p = normalizeSeparators(p);
|
|
if (!p) {
|
|
throw new Error('isRooted() parameter "p" cannot be empty');
|
|
}
|
|
if (exports.IS_WINDOWS) {
|
|
        // e.g. \, \hello, \\hello\share, C:, C:\hello
        return p.startsWith('\\') || /^[A-Z]:/i.test(p);
|
|
}
|
|
return p.startsWith('/');
|
|
}
|
|
exports.isRooted = isRooted;
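// Editor's note: illustrative results (comment only), following the doc comment above:
//
//   isRooted('/usr/bin')    // => true on Linux/macOS
//   isRooted('C:\\hello')   // => true on Windows
//   isRooted('hello/world') // => false on either platform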
|
|
/**
|
|
* Best effort attempt to determine whether a file exists and is executable.
|
|
* @param filePath file path to check
|
|
* @param extensions additional file extensions to try
|
|
* @return if file exists and is executable, returns the file path. otherwise empty string.
|
|
*/
|
|
function tryGetExecutablePath(filePath, extensions) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
let stats = undefined;
|
|
try {
|
|
// test file exists
|
|
stats = yield exports.stat(filePath);
|
|
}
|
|
catch (err) {
|
|
if (err.code !== 'ENOENT') {
|
|
// eslint-disable-next-line no-console
|
|
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
|
|
}
|
|
}
|
|
if (stats && stats.isFile()) {
|
|
if (exports.IS_WINDOWS) {
|
|
// on Windows, test for valid extension
|
|
const upperExt = path.extname(filePath).toUpperCase();
|
|
if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
|
|
return filePath;
|
|
}
|
|
}
|
|
else {
|
|
if (isUnixExecutable(stats)) {
|
|
return filePath;
|
|
}
|
|
}
|
|
}
|
|
// try each extension
|
|
const originalFilePath = filePath;
|
|
for (const extension of extensions) {
|
|
filePath = originalFilePath + extension;
|
|
stats = undefined;
|
|
try {
|
|
stats = yield exports.stat(filePath);
|
|
}
|
|
catch (err) {
|
|
if (err.code !== 'ENOENT') {
|
|
// eslint-disable-next-line no-console
|
|
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
|
|
}
|
|
}
|
|
if (stats && stats.isFile()) {
|
|
if (exports.IS_WINDOWS) {
|
|
// preserve the case of the actual file (since an extension was appended)
|
|
try {
|
|
const directory = path.dirname(filePath);
|
|
const upperName = path.basename(filePath).toUpperCase();
|
|
for (const actualName of yield exports.readdir(directory)) {
|
|
if (upperName === actualName.toUpperCase()) {
|
|
filePath = path.join(directory, actualName);
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
catch (err) {
|
|
// eslint-disable-next-line no-console
|
|
console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
|
|
}
|
|
return filePath;
|
|
}
|
|
else {
|
|
if (isUnixExecutable(stats)) {
|
|
return filePath;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
return '';
|
|
});
|
|
}
|
|
exports.tryGetExecutablePath = tryGetExecutablePath;
|
|
function normalizeSeparators(p) {
    p = p || '';
    if (exports.IS_WINDOWS) {
        // convert slashes on Windows
        p = p.replace(/\//g, '\\');
        // remove redundant slashes
        return p.replace(/\\\\+/g, '\\');
    }
    // remove redundant slashes
    return p.replace(/\/\/+/g, '/');
}
// on Mac/Linux, test the execute bit
//     R   W  X  R  W X R W X
//   256 128 64 32 16 8 4 2 1
function isUnixExecutable(stats) {
    return ((stats.mode & 1) > 0 ||
        ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
        ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
}
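// Editorial note (illustrative only, not part of the upstream source): the masks above follow
// the standard POSIX permission layout. For a file with mode 0o755 (rwxr-xr-x):
//   (mode & 64) > 0 -> true, owner execute bit, accepted when stats.uid === process.getuid()
//   (mode & 8)  > 0 -> true, group execute bit, accepted when stats.gid === process.getgid()
//   (mode & 1)  > 0 -> true, "other" execute bit, accepted for any caller
// whereas mode 0o644 (rw-r--r--) fails all three checks and is treated as non-executable.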
|
|
// Get the path of cmd.exe in windows
function getCmdPath() {
    var _a;
    return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;
}
exports.getCmdPath = getCmdPath;
//# sourceMappingURL=io-util.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7436:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;
|
|
const assert_1 = __nccwpck_require__(9491);
|
|
const childProcess = __importStar(__nccwpck_require__(2081));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
const util_1 = __nccwpck_require__(3837);
|
|
const ioUtil = __importStar(__nccwpck_require__(1962));
|
|
const exec = util_1.promisify(childProcess.exec);
|
|
const execFile = util_1.promisify(childProcess.execFile);
|
|
/**
|
|
* Copies a file or folder.
|
|
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
|
|
*
|
|
* @param source source path
|
|
* @param dest destination path
|
|
* @param options optional. See CopyOptions.
|
|
*/
|
|
function cp(source, dest, options = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const { force, recursive, copySourceDirectory } = readCopyOptions(options);
|
|
const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
|
|
// Dest is an existing file, but not forcing
|
|
if (destStat && destStat.isFile() && !force) {
|
|
return;
|
|
}
|
|
// If dest is an existing directory, should copy inside.
|
|
const newDest = destStat && destStat.isDirectory() && copySourceDirectory
|
|
? path.join(dest, path.basename(source))
|
|
: dest;
|
|
if (!(yield ioUtil.exists(source))) {
|
|
throw new Error(`no such file or directory: ${source}`);
|
|
}
|
|
const sourceStat = yield ioUtil.stat(source);
|
|
if (sourceStat.isDirectory()) {
|
|
if (!recursive) {
|
|
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
|
|
}
|
|
else {
|
|
yield cpDirRecursive(source, newDest, 0, force);
|
|
}
|
|
}
|
|
else {
|
|
if (path.relative(source, newDest) === '') {
|
|
// a file cannot be copied to itself
|
|
throw new Error(`'${newDest}' and '${source}' are the same file`);
|
|
}
|
|
yield copyFile(source, newDest, force);
|
|
}
|
|
});
|
|
}
|
|
exports.cp = cp;
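// Usage sketch (editorial illustration only, inside an async caller; the paths are placeholders,
// not ones used by this action). cp() is the function published as @actions/io's cp:
//
//   await cp('config/default.yml', 'build/default.yml');                 // force defaults to true
//   await cp('fixtures', 'build/fixtures', { recursive: true });         // required for directories
//   await cp('fixtures', 'build', { recursive: true, copySourceDirectory: false }); // copy contents only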
|
|
/**
|
|
* Moves a path.
|
|
*
|
|
* @param source source path
|
|
* @param dest destination path
|
|
* @param options optional. See MoveOptions.
|
|
*/
|
|
function mv(source, dest, options = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (yield ioUtil.exists(dest)) {
|
|
let destExists = true;
|
|
if (yield ioUtil.isDirectory(dest)) {
|
|
// If dest is directory copy src into dest
|
|
dest = path.join(dest, path.basename(source));
|
|
destExists = yield ioUtil.exists(dest);
|
|
}
|
|
if (destExists) {
|
|
if (options.force == null || options.force) {
|
|
yield rmRF(dest);
|
|
}
|
|
else {
|
|
throw new Error('Destination already exists');
|
|
}
|
|
}
|
|
}
|
|
yield mkdirP(path.dirname(dest));
|
|
yield ioUtil.rename(source, dest);
|
|
});
|
|
}
|
|
exports.mv = mv;
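// Usage sketch (editorial illustration only; placeholder paths). With the default force behaviour
// an existing destination is removed before the rename; { force: false } throws instead:
//
//   await mv('build/output.apk', 'artifacts/output.apk');
//   await mv('build/output.apk', 'artifacts/', { force: false });   // 'Destination already exists'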
|
|
/**
|
|
* Remove a path recursively with force
|
|
*
|
|
* @param inputPath path to remove
|
|
*/
|
|
function rmRF(inputPath) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (ioUtil.IS_WINDOWS) {
|
|
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
|
|
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
|
|
// Check for invalid characters
|
|
// https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
|
|
if (/[*"<>|]/.test(inputPath)) {
|
|
throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows');
|
|
}
|
|
try {
|
|
const cmdPath = ioUtil.getCmdPath();
|
|
if (yield ioUtil.isDirectory(inputPath, true)) {
|
|
yield exec(`${cmdPath} /s /c "rd /s /q "%inputPath%""`, {
|
|
env: { inputPath }
|
|
});
|
|
}
|
|
else {
|
|
yield exec(`${cmdPath} /s /c "del /f /a "%inputPath%""`, {
|
|
env: { inputPath }
|
|
});
|
|
}
|
|
}
|
|
catch (err) {
|
|
// if you try to delete a file that doesn't exist, desired result is achieved
|
|
// other errors are valid
|
|
if (err.code !== 'ENOENT')
|
|
throw err;
|
|
}
|
|
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that
|
|
try {
|
|
yield ioUtil.unlink(inputPath);
|
|
}
|
|
catch (err) {
|
|
// if you try to delete a file that doesn't exist, desired result is achieved
|
|
// other errors are valid
|
|
if (err.code !== 'ENOENT')
|
|
throw err;
|
|
}
|
|
}
|
|
else {
|
|
let isDir = false;
|
|
try {
|
|
isDir = yield ioUtil.isDirectory(inputPath);
|
|
}
|
|
catch (err) {
|
|
// if you try to delete a file that doesn't exist, desired result is achieved
|
|
// other errors are valid
|
|
if (err.code !== 'ENOENT')
|
|
throw err;
|
|
return;
|
|
}
|
|
if (isDir) {
|
|
yield execFile(`rm`, [`-rf`, `${inputPath}`]);
|
|
}
|
|
else {
|
|
yield ioUtil.unlink(inputPath);
|
|
}
|
|
}
|
|
});
|
|
}
|
|
exports.rmRF = rmRF;
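// Usage sketch (editorial illustration only; placeholder paths). rmRF() mirrors `rm -rf`
// semantics, so a missing path resolves quietly (ENOENT is swallowed above):
//
//   await rmRF('build');               // removes a whole directory tree
//   await rmRF('build/missing.txt');   // no error if it does not exist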
|
|
/**
|
|
* Make a directory. Creates the full path with folders in between
|
|
* Will throw if it fails
|
|
*
|
|
* @param fsPath path to create
|
|
* @returns Promise<void>
|
|
*/
|
|
function mkdirP(fsPath) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
assert_1.ok(fsPath, 'a path argument must be provided');
|
|
yield ioUtil.mkdir(fsPath, { recursive: true });
|
|
});
|
|
}
|
|
exports.mkdirP = mkdirP;
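// Usage sketch (editorial illustration only; placeholder path). mkdirP() delegates to fs.mkdir
// with { recursive: true }, so intermediate directories are created and an existing path is fine:
//
//   await mkdirP('/tmp/android/cmdline-tools/latest');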
|
|
/**
|
|
 * Returns the path of a tool as if it had actually been invoked. Resolves via the PATH environment variable.
|
|
* If you check and the tool does not exist, it will throw.
|
|
*
|
|
* @param tool name of the tool
|
|
* @param check whether to check if tool exists
|
|
* @returns Promise<string> path to tool
|
|
*/
|
|
function which(tool, check) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (!tool) {
|
|
throw new Error("parameter 'tool' is required");
|
|
}
|
|
// recursive when check=true
|
|
if (check) {
|
|
const result = yield which(tool, false);
|
|
if (!result) {
|
|
if (ioUtil.IS_WINDOWS) {
|
|
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
|
|
}
|
|
else {
|
|
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
|
|
}
|
|
}
|
|
return result;
|
|
}
|
|
const matches = yield findInPath(tool);
|
|
if (matches && matches.length > 0) {
|
|
return matches[0];
|
|
}
|
|
return '';
|
|
});
|
|
}
|
|
exports.which = which;
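// Usage sketch (editorial illustration only). With check=true a missing tool throws the
// descriptive error above; with check omitted the call resolves to '' when nothing is found:
//
//   const gradlePath = await which('gradle', true);   // throws if gradle is not on PATH
//   const javaPath = await which('java');             // '' when java cannot be located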
|
|
/**
|
|
* Returns a list of all occurrences of the given tool on the system path.
|
|
*
|
|
* @returns Promise<string[]> the paths of the tool
|
|
*/
|
|
function findInPath(tool) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (!tool) {
|
|
throw new Error("parameter 'tool' is required");
|
|
}
|
|
// build the list of extensions to try
|
|
const extensions = [];
|
|
if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {
|
|
for (const extension of process.env['PATHEXT'].split(path.delimiter)) {
|
|
if (extension) {
|
|
extensions.push(extension);
|
|
}
|
|
}
|
|
}
|
|
// if it's rooted, return it if exists. otherwise return empty.
|
|
if (ioUtil.isRooted(tool)) {
|
|
const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
|
|
if (filePath) {
|
|
return [filePath];
|
|
}
|
|
return [];
|
|
}
|
|
// if any path separators, return empty
|
|
if (tool.includes(path.sep)) {
|
|
return [];
|
|
}
|
|
// build the list of directories
|
|
//
|
|
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
|
|
// it feels like we should not do this. Checking the current directory seems like more of a use
|
|
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
|
|
// across platforms.
|
|
const directories = [];
|
|
if (process.env.PATH) {
|
|
for (const p of process.env.PATH.split(path.delimiter)) {
|
|
if (p) {
|
|
directories.push(p);
|
|
}
|
|
}
|
|
}
|
|
// find all matches
|
|
const matches = [];
|
|
for (const directory of directories) {
|
|
const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);
|
|
if (filePath) {
|
|
matches.push(filePath);
|
|
}
|
|
}
|
|
return matches;
|
|
});
|
|
}
|
|
exports.findInPath = findInPath;
|
|
function readCopyOptions(options) {
|
|
const force = options.force == null ? true : options.force;
|
|
const recursive = Boolean(options.recursive);
|
|
const copySourceDirectory = options.copySourceDirectory == null
|
|
? true
|
|
: Boolean(options.copySourceDirectory);
|
|
return { force, recursive, copySourceDirectory };
|
|
}
|
|
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
// Ensure there is not a run away recursive copy
|
|
if (currentDepth >= 255)
|
|
return;
|
|
currentDepth++;
|
|
yield mkdirP(destDir);
|
|
const files = yield ioUtil.readdir(sourceDir);
|
|
for (const fileName of files) {
|
|
const srcFile = `${sourceDir}/${fileName}`;
|
|
const destFile = `${destDir}/${fileName}`;
|
|
const srcFileStat = yield ioUtil.lstat(srcFile);
|
|
if (srcFileStat.isDirectory()) {
|
|
// Recurse
|
|
yield cpDirRecursive(srcFile, destFile, currentDepth, force);
|
|
}
|
|
else {
|
|
yield copyFile(srcFile, destFile, force);
|
|
}
|
|
}
|
|
// Change the mode for the newly created directory
|
|
yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
|
|
});
|
|
}
|
|
// Buffered file copy
|
|
function copyFile(srcFile, destFile, force) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
|
|
// unlink/re-link it
|
|
try {
|
|
yield ioUtil.lstat(destFile);
|
|
yield ioUtil.unlink(destFile);
|
|
}
|
|
catch (e) {
|
|
// Try to override file permission
|
|
if (e.code === 'EPERM') {
|
|
yield ioUtil.chmod(destFile, '0666');
|
|
yield ioUtil.unlink(destFile);
|
|
}
|
|
// other errors = it doesn't exist, no work to do
|
|
}
|
|
// Copy over symlink
|
|
const symlinkFull = yield ioUtil.readlink(srcFile);
|
|
yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
|
|
}
|
|
else if (!(yield ioUtil.exists(destFile)) || force) {
|
|
yield ioUtil.copyFile(srcFile, destFile);
|
|
}
|
|
});
|
|
}
|
|
//# sourceMappingURL=io.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2473:
|
|
/***/ (function(module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0;
|
|
const semver = __importStar(__nccwpck_require__(5911));
|
|
const core_1 = __nccwpck_require__(2186);
|
|
// needs to be require for core node modules to be mocked
|
|
/* eslint @typescript-eslint/no-require-imports: 0 */
|
|
const os = __nccwpck_require__(2037);
|
|
const cp = __nccwpck_require__(2081);
|
|
const fs = __nccwpck_require__(7147);
|
|
function _findMatch(versionSpec, stable, candidates, archFilter) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const platFilter = os.platform();
|
|
let result;
|
|
let match;
|
|
let file;
|
|
for (const candidate of candidates) {
|
|
const version = candidate.version;
|
|
core_1.debug(`check ${version} satisfies ${versionSpec}`);
|
|
if (semver.satisfies(version, versionSpec) &&
|
|
(!stable || candidate.stable === stable)) {
|
|
file = candidate.files.find(item => {
|
|
core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
|
|
let chk = item.arch === archFilter && item.platform === platFilter;
|
|
if (chk && item.platform_version) {
|
|
const osVersion = module.exports._getOsVersion();
|
|
if (osVersion === item.platform_version) {
|
|
chk = true;
|
|
}
|
|
else {
|
|
chk = semver.satisfies(osVersion, item.platform_version);
|
|
}
|
|
}
|
|
return chk;
|
|
});
|
|
if (file) {
|
|
core_1.debug(`matched ${candidate.version}`);
|
|
match = candidate;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
if (match && file) {
|
|
// clone since we're mutating the file list to be only the file that matches
|
|
result = Object.assign({}, match);
|
|
result.files = [file];
|
|
}
|
|
return result;
|
|
});
|
|
}
|
|
exports._findMatch = _findMatch;
|
|
function _getOsVersion() {
|
|
// TODO: add windows and other linux, arm variants
|
|
// right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
|
|
const plat = os.platform();
|
|
let version = '';
|
|
if (plat === 'darwin') {
|
|
version = cp.execSync('sw_vers -productVersion').toString();
|
|
}
|
|
else if (plat === 'linux') {
|
|
// lsb_release process not in some containers, readfile
|
|
// Run cat /etc/lsb-release
|
|
// DISTRIB_ID=Ubuntu
|
|
// DISTRIB_RELEASE=18.04
|
|
// DISTRIB_CODENAME=bionic
|
|
// DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
|
|
const lsbContents = module.exports._readLinuxVersionFile();
|
|
if (lsbContents) {
|
|
const lines = lsbContents.split('\n');
|
|
for (const line of lines) {
|
|
const parts = line.split('=');
|
|
if (parts.length === 2 &&
|
|
(parts[0].trim() === 'VERSION_ID' ||
|
|
parts[0].trim() === 'DISTRIB_RELEASE')) {
|
|
version = parts[1]
|
|
.trim()
|
|
.replace(/^"/, '')
|
|
.replace(/"$/, '');
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
return version;
|
|
}
|
|
exports._getOsVersion = _getOsVersion;
|
|
function _readLinuxVersionFile() {
|
|
const lsbReleaseFile = '/etc/lsb-release';
|
|
const osReleaseFile = '/etc/os-release';
|
|
let contents = '';
|
|
if (fs.existsSync(lsbReleaseFile)) {
|
|
contents = fs.readFileSync(lsbReleaseFile).toString();
|
|
}
|
|
else if (fs.existsSync(osReleaseFile)) {
|
|
contents = fs.readFileSync(osReleaseFile).toString();
|
|
}
|
|
return contents;
|
|
}
|
|
exports._readLinuxVersionFile = _readLinuxVersionFile;
|
|
//# sourceMappingURL=manifest.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8279:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.RetryHelper = void 0;
|
|
const core = __importStar(__nccwpck_require__(2186));
|
|
/**
|
|
* Internal class for retries
|
|
*/
|
|
class RetryHelper {
|
|
constructor(maxAttempts, minSeconds, maxSeconds) {
|
|
if (maxAttempts < 1) {
|
|
throw new Error('max attempts should be greater than or equal to 1');
|
|
}
|
|
this.maxAttempts = maxAttempts;
|
|
this.minSeconds = Math.floor(minSeconds);
|
|
this.maxSeconds = Math.floor(maxSeconds);
|
|
if (this.minSeconds > this.maxSeconds) {
|
|
throw new Error('min seconds should be less than or equal to max seconds');
|
|
}
|
|
}
|
|
execute(action, isRetryable) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
let attempt = 1;
|
|
while (attempt < this.maxAttempts) {
|
|
// Try
|
|
try {
|
|
return yield action();
|
|
}
|
|
catch (err) {
|
|
if (isRetryable && !isRetryable(err)) {
|
|
throw err;
|
|
}
|
|
core.info(err.message);
|
|
}
|
|
// Sleep
|
|
const seconds = this.getSleepAmount();
|
|
core.info(`Waiting ${seconds} seconds before trying again`);
|
|
yield this.sleep(seconds);
|
|
attempt++;
|
|
}
|
|
// Last attempt
|
|
return yield action();
|
|
});
|
|
}
|
|
getSleepAmount() {
|
|
return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
|
|
this.minSeconds);
|
|
}
|
|
sleep(seconds) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise(resolve => setTimeout(resolve, seconds * 1000));
|
|
});
|
|
}
|
|
}
|
|
exports.RetryHelper = RetryHelper;
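// Usage sketch (editorial illustration only; doSomething() is a hypothetical action). The helper
// runs the action up to maxAttempts times, sleeping a random interval between minSeconds and
// maxSeconds before each retry; the optional predicate can veto retries:
//
//   const retryHelper = new RetryHelper(3, 10, 20);
//   const result = await retryHelper.execute(
//       async () => doSomething(),
//       err => !err.message.includes('fatal'));   // e.g. do not retry errors marked fatal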
|
|
//# sourceMappingURL=retry-helper.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7784:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0;
|
|
const core = __importStar(__nccwpck_require__(2186));
|
|
const io = __importStar(__nccwpck_require__(7436));
|
|
const fs = __importStar(__nccwpck_require__(7147));
|
|
const mm = __importStar(__nccwpck_require__(2473));
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
const httpm = __importStar(__nccwpck_require__(6255));
|
|
const semver = __importStar(__nccwpck_require__(5911));
|
|
const stream = __importStar(__nccwpck_require__(2781));
|
|
const util = __importStar(__nccwpck_require__(3837));
|
|
const assert_1 = __nccwpck_require__(9491);
|
|
const v4_1 = __importDefault(__nccwpck_require__(824));
|
|
const exec_1 = __nccwpck_require__(1514);
|
|
const retry_helper_1 = __nccwpck_require__(8279);
|
|
class HTTPError extends Error {
|
|
constructor(httpStatusCode) {
|
|
super(`Unexpected HTTP response: ${httpStatusCode}`);
|
|
this.httpStatusCode = httpStatusCode;
|
|
Object.setPrototypeOf(this, new.target.prototype);
|
|
}
|
|
}
|
|
exports.HTTPError = HTTPError;
|
|
const IS_WINDOWS = process.platform === 'win32';
|
|
const IS_MAC = process.platform === 'darwin';
|
|
const userAgent = 'actions/tool-cache';
|
|
/**
|
|
* Download a tool from an url and stream it into a file
|
|
*
|
|
* @param url url of tool to download
|
|
* @param dest path to download tool
|
|
* @param auth authorization header
|
|
* @param headers other headers
|
|
* @returns path to downloaded tool
|
|
*/
|
|
function downloadTool(url, dest, auth, headers) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
dest = dest || path.join(_getTempDirectory(), v4_1.default());
|
|
yield io.mkdirP(path.dirname(dest));
|
|
core.debug(`Downloading ${url}`);
|
|
core.debug(`Destination ${dest}`);
|
|
const maxAttempts = 3;
|
|
const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
|
|
const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
|
|
const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
|
|
return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
|
|
return yield downloadToolAttempt(url, dest || '', auth, headers);
|
|
}), (err) => {
|
|
if (err instanceof HTTPError && err.httpStatusCode) {
|
|
// Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
|
|
if (err.httpStatusCode < 500 &&
|
|
err.httpStatusCode !== 408 &&
|
|
err.httpStatusCode !== 429) {
|
|
return false;
|
|
}
|
|
}
|
|
// Otherwise retry
|
|
return true;
|
|
});
|
|
});
|
|
}
|
|
exports.downloadTool = downloadTool;
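// Usage sketch (editorial illustration only; the URLs are placeholders, not ones used by this
// action). downloadTool() retries transient failures and resolves to the downloaded file path,
// which defaults to a random name under RUNNER_TEMP:
//
//   const archivePath = await downloadTool('https://example.com/tool.zip');
//   const authedPath = await downloadTool('https://example.com/private.zip', undefined,
//       `token ${process.env.GITHUB_TOKEN}`);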
|
|
function downloadToolAttempt(url, dest, auth, headers) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (fs.existsSync(dest)) {
|
|
throw new Error(`Destination file path ${dest} already exists`);
|
|
}
|
|
// Get the response headers
|
|
const http = new httpm.HttpClient(userAgent, [], {
|
|
allowRetries: false
|
|
});
|
|
if (auth) {
|
|
core.debug('set auth');
|
|
if (headers === undefined) {
|
|
headers = {};
|
|
}
|
|
headers.authorization = auth;
|
|
}
|
|
const response = yield http.get(url, headers);
|
|
if (response.message.statusCode !== 200) {
|
|
const err = new HTTPError(response.message.statusCode);
|
|
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
|
throw err;
|
|
}
|
|
// Download the response body
|
|
const pipeline = util.promisify(stream.pipeline);
|
|
const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
|
|
const readStream = responseMessageFactory();
|
|
let succeeded = false;
|
|
try {
|
|
yield pipeline(readStream, fs.createWriteStream(dest));
|
|
core.debug('download complete');
|
|
succeeded = true;
|
|
return dest;
|
|
}
|
|
finally {
|
|
// Error, delete dest before retry
|
|
if (!succeeded) {
|
|
core.debug('download failed');
|
|
try {
|
|
yield io.rmRF(dest);
|
|
}
|
|
catch (err) {
|
|
core.debug(`Failed to delete '${dest}'. ${err.message}`);
|
|
}
|
|
}
|
|
}
|
|
});
|
|
}
|
|
/**
|
|
* Extract a .7z file
|
|
*
|
|
* @param file path to the .7z file
|
|
* @param dest destination directory. Optional.
|
|
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
|
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
|
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
|
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
|
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
|
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
|
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
|
 * to 7zr.exe can be passed to this function.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extract7z(file, dest, _7zPath) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
|
|
assert_1.ok(file, 'parameter "file" is required');
|
|
dest = yield _createExtractFolder(dest);
|
|
const originalCwd = process.cwd();
|
|
process.chdir(dest);
|
|
if (_7zPath) {
|
|
try {
|
|
const logLevel = core.isDebug() ? '-bb1' : '-bb0';
|
|
const args = [
|
|
'x',
|
|
logLevel,
|
|
'-bd',
|
|
'-sccUTF-8',
|
|
file
|
|
];
|
|
const options = {
|
|
silent: true
|
|
};
|
|
yield exec_1.exec(`"${_7zPath}"`, args, options);
|
|
}
|
|
finally {
|
|
process.chdir(originalCwd);
|
|
}
|
|
}
|
|
else {
|
|
const escapedScript = path
|
|
.join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
|
|
.replace(/'/g, "''")
|
|
.replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
|
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
|
const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
|
const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
|
|
const args = [
|
|
'-NoLogo',
|
|
'-Sta',
|
|
'-NoProfile',
|
|
'-NonInteractive',
|
|
'-ExecutionPolicy',
|
|
'Unrestricted',
|
|
'-Command',
|
|
command
|
|
];
|
|
const options = {
|
|
silent: true
|
|
};
|
|
try {
|
|
const powershellPath = yield io.which('powershell', true);
|
|
yield exec_1.exec(`"${powershellPath}"`, args, options);
|
|
}
|
|
finally {
|
|
process.chdir(originalCwd);
|
|
}
|
|
}
|
|
return dest;
|
|
});
|
|
}
|
|
exports.extract7z = extract7z;
|
|
/**
|
|
* Extract a compressed tar archive
|
|
*
|
|
* @param file path to the tar
|
|
* @param dest destination directory. Optional.
|
|
* @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractTar(file, dest, flags = 'xz') {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (!file) {
|
|
throw new Error("parameter 'file' is required");
|
|
}
|
|
// Create dest
|
|
dest = yield _createExtractFolder(dest);
|
|
// Determine whether GNU tar
|
|
core.debug('Checking tar --version');
|
|
let versionOutput = '';
|
|
yield exec_1.exec('tar --version', [], {
|
|
ignoreReturnCode: true,
|
|
silent: true,
|
|
listeners: {
|
|
stdout: (data) => (versionOutput += data.toString()),
|
|
stderr: (data) => (versionOutput += data.toString())
|
|
}
|
|
});
|
|
core.debug(versionOutput.trim());
|
|
const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
|
|
// Initialize args
|
|
let args;
|
|
if (flags instanceof Array) {
|
|
args = flags;
|
|
}
|
|
else {
|
|
args = [flags];
|
|
}
|
|
if (core.isDebug() && !flags.includes('v')) {
|
|
args.push('-v');
|
|
}
|
|
let destArg = dest;
|
|
let fileArg = file;
|
|
if (IS_WINDOWS && isGnuTar) {
|
|
args.push('--force-local');
|
|
destArg = dest.replace(/\\/g, '/');
|
|
// Technically only the dest needs to have `/` but for aesthetic consistency
|
|
// convert slashes in the file arg too.
|
|
fileArg = file.replace(/\\/g, '/');
|
|
}
|
|
if (isGnuTar) {
|
|
// Suppress warnings when using GNU tar to extract archives created by BSD tar
|
|
args.push('--warning=no-unknown-keyword');
|
|
args.push('--overwrite');
|
|
}
|
|
args.push('-C', destArg, '-f', fileArg);
|
|
yield exec_1.exec(`tar`, args);
|
|
return dest;
|
|
});
|
|
}
|
|
exports.extractTar = extractTar;
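// Usage sketch (editorial illustration only; placeholder paths). The default 'xz' flags handle
// gzipped tarballs; other compressions need explicit flags via the flags parameter:
//
//   const dir = await extractTar('/tmp/tool.tar.gz');
//   const dirXz = await extractTar('/tmp/tool.tar.xz', undefined, 'xJ');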
|
|
/**
|
|
* Extract a xar compatible archive
|
|
*
|
|
* @param file path to the archive
|
|
* @param dest destination directory. Optional.
|
|
 * @param flags flags for the xar command. Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractXar(file, dest, flags = []) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');
|
|
assert_1.ok(file, 'parameter "file" is required');
|
|
dest = yield _createExtractFolder(dest);
|
|
let args;
|
|
if (flags instanceof Array) {
|
|
args = flags;
|
|
}
|
|
else {
|
|
args = [flags];
|
|
}
|
|
args.push('-x', '-C', dest, '-f', file);
|
|
if (core.isDebug()) {
|
|
args.push('-v');
|
|
}
|
|
const xarPath = yield io.which('xar', true);
|
|
yield exec_1.exec(`"${xarPath}"`, _unique(args));
|
|
return dest;
|
|
});
|
|
}
|
|
exports.extractXar = extractXar;
|
|
/**
|
|
* Extract a zip
|
|
*
|
|
* @param file path to the zip
|
|
* @param dest destination directory. Optional.
|
|
* @returns path to the destination directory
|
|
*/
|
|
function extractZip(file, dest) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (!file) {
|
|
throw new Error("parameter 'file' is required");
|
|
}
|
|
dest = yield _createExtractFolder(dest);
|
|
if (IS_WINDOWS) {
|
|
yield extractZipWin(file, dest);
|
|
}
|
|
else {
|
|
yield extractZipNix(file, dest);
|
|
}
|
|
return dest;
|
|
});
|
|
}
|
|
exports.extractZip = extractZip;
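// Usage sketch (editorial illustration only; placeholder paths). On Windows extraction goes
// through pwsh/powershell as above, elsewhere through `unzip`; dest defaults to a temp dir
// under RUNNER_TEMP:
//
//   const toolsDir = await extractZip('/tmp/commandlinetools.zip');
//   const pinnedDir = await extractZip('/tmp/commandlinetools.zip', '/opt/android-sdk');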
|
|
function extractZipWin(file, dest) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
// build the powershell command
|
|
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
|
const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
|
const pwshPath = yield io.which('pwsh', false);
|
|
//To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory
|
|
//and the -Force flag for Expand-Archive as a fallback
|
|
if (pwshPath) {
|
|
//attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive
|
|
const pwshCommand = [
|
|
`$ErrorActionPreference = 'Stop' ;`,
|
|
`try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`,
|
|
`try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`,
|
|
`catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;`
|
|
].join(' ');
|
|
const args = [
|
|
'-NoLogo',
|
|
'-NoProfile',
|
|
'-NonInteractive',
|
|
'-ExecutionPolicy',
|
|
'Unrestricted',
|
|
'-Command',
|
|
pwshCommand
|
|
];
|
|
core.debug(`Using pwsh at path: ${pwshPath}`);
|
|
yield exec_1.exec(`"${pwshPath}"`, args);
|
|
}
|
|
else {
|
|
const powershellCommand = [
|
|
`$ErrorActionPreference = 'Stop' ;`,
|
|
`try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`,
|
|
`if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`,
|
|
`else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`
|
|
].join(' ');
|
|
const args = [
|
|
'-NoLogo',
|
|
'-Sta',
|
|
'-NoProfile',
|
|
'-NonInteractive',
|
|
'-ExecutionPolicy',
|
|
'Unrestricted',
|
|
'-Command',
|
|
powershellCommand
|
|
];
|
|
const powershellPath = yield io.which('powershell', true);
|
|
core.debug(`Using powershell at path: ${powershellPath}`);
|
|
yield exec_1.exec(`"${powershellPath}"`, args);
|
|
}
|
|
});
|
|
}
|
|
function extractZipNix(file, dest) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const unzipPath = yield io.which('unzip', true);
|
|
const args = [file];
|
|
if (!core.isDebug()) {
|
|
args.unshift('-q');
|
|
}
|
|
args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run
|
|
yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest });
|
|
});
|
|
}
|
|
/**
|
|
* Caches a directory and installs it into the tool cacheDir
|
|
*
|
|
* @param sourceDir the directory to cache into tools
|
|
* @param tool tool name
|
|
* @param version version of the tool. semver format
|
|
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
|
*/
|
|
function cacheDir(sourceDir, tool, version, arch) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
version = semver.clean(version) || version;
|
|
arch = arch || os.arch();
|
|
core.debug(`Caching tool ${tool} ${version} ${arch}`);
|
|
core.debug(`source dir: ${sourceDir}`);
|
|
if (!fs.statSync(sourceDir).isDirectory()) {
|
|
throw new Error('sourceDir is not a directory');
|
|
}
|
|
// Create the tool dir
|
|
const destPath = yield _createToolPath(tool, version, arch);
|
|
// copy each child item. do not move. move can fail on Windows
|
|
// due to anti-virus software having an open handle on a file.
|
|
for (const itemName of fs.readdirSync(sourceDir)) {
|
|
const s = path.join(sourceDir, itemName);
|
|
yield io.cp(s, destPath, { recursive: true });
|
|
}
|
|
// write .complete
|
|
_completeToolPath(tool, version, arch);
|
|
return destPath;
|
|
});
|
|
}
|
|
exports.cacheDir = cacheDir;
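// Usage sketch (editorial illustration only; tool name, version and path are placeholders).
// cacheDir() copies the extracted directory into RUNNER_TOOL_CACHE/<tool>/<version>/<arch>
// and writes the .complete marker that find() checks for:
//
//   const cachedPath = await cacheDir('/tmp/extracted-sdk', 'android-cmdline-tools', '11.0.0');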
|
|
/**
|
|
* Caches a downloaded file (GUID) and installs it
|
|
* into the tool cache with a given targetName
|
|
*
|
|
* @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
|
|
* @param targetFile the name of the file name in the tools directory
|
|
* @param tool tool name
|
|
* @param version version of the tool. semver format
|
|
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
|
*/
|
|
function cacheFile(sourceFile, targetFile, tool, version, arch) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
version = semver.clean(version) || version;
|
|
arch = arch || os.arch();
|
|
core.debug(`Caching tool ${tool} ${version} ${arch}`);
|
|
core.debug(`source file: ${sourceFile}`);
|
|
if (!fs.statSync(sourceFile).isFile()) {
|
|
throw new Error('sourceFile is not a file');
|
|
}
|
|
// create the tool dir
|
|
const destFolder = yield _createToolPath(tool, version, arch);
|
|
// copy instead of move. move can fail on Windows due to
|
|
// anti-virus software having an open handle on a file.
|
|
const destPath = path.join(destFolder, targetFile);
|
|
core.debug(`destination file ${destPath}`);
|
|
yield io.cp(sourceFile, destPath);
|
|
// write .complete
|
|
_completeToolPath(tool, version, arch);
|
|
return destFolder;
|
|
});
|
|
}
|
|
exports.cacheFile = cacheFile;
|
|
/**
|
|
* Finds the path to a tool version in the local installed tool cache
|
|
*
|
|
* @param toolName name of the tool
|
|
* @param versionSpec version of the tool
|
|
* @param arch optional arch. defaults to arch of computer
|
|
*/
|
|
function find(toolName, versionSpec, arch) {
|
|
if (!toolName) {
|
|
throw new Error('toolName parameter is required');
|
|
}
|
|
if (!versionSpec) {
|
|
throw new Error('versionSpec parameter is required');
|
|
}
|
|
arch = arch || os.arch();
|
|
// attempt to resolve an explicit version
|
|
if (!isExplicitVersion(versionSpec)) {
|
|
const localVersions = findAllVersions(toolName, arch);
|
|
const match = evaluateVersions(localVersions, versionSpec);
|
|
versionSpec = match;
|
|
}
|
|
// check for the explicit version in the cache
|
|
let toolPath = '';
|
|
if (versionSpec) {
|
|
versionSpec = semver.clean(versionSpec) || '';
|
|
const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
|
|
core.debug(`checking cache: ${cachePath}`);
|
|
if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
|
|
core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
|
|
toolPath = cachePath;
|
|
}
|
|
else {
|
|
core.debug('not found');
|
|
}
|
|
}
|
|
return toolPath;
|
|
}
|
|
exports.find = find;
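// Usage sketch (editorial illustration only; tool name, version and URL are placeholders).
// find() returns '' on a cache miss, so the usual pattern is download, extract, then cache:
//
//   let toolPath = find('android-cmdline-tools', '11.0.0');
//   if (!toolPath) {
//       const zip = await downloadTool('https://example.com/cmdline-tools.zip');
//       toolPath = await cacheDir(await extractZip(zip), 'android-cmdline-tools', '11.0.0');
//   }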
|
|
/**
|
|
* Finds the paths to all versions of a tool that are installed in the local tool cache
|
|
*
|
|
* @param toolName name of the tool
|
|
* @param arch optional arch. defaults to arch of computer
|
|
*/
|
|
function findAllVersions(toolName, arch) {
|
|
const versions = [];
|
|
arch = arch || os.arch();
|
|
const toolPath = path.join(_getCacheDirectory(), toolName);
|
|
if (fs.existsSync(toolPath)) {
|
|
const children = fs.readdirSync(toolPath);
|
|
for (const child of children) {
|
|
if (isExplicitVersion(child)) {
|
|
const fullPath = path.join(toolPath, child, arch || '');
|
|
if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
|
|
versions.push(child);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
return versions;
|
|
}
|
|
exports.findAllVersions = findAllVersions;
|
|
function getManifestFromRepo(owner, repo, auth, branch = 'master') {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
let releases = [];
|
|
const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;
|
|
const http = new httpm.HttpClient('tool-cache');
|
|
const headers = {};
|
|
if (auth) {
|
|
core.debug('set auth');
|
|
headers.authorization = auth;
|
|
}
|
|
const response = yield http.getJson(treeUrl, headers);
|
|
if (!response.result) {
|
|
return releases;
|
|
}
|
|
let manifestUrl = '';
|
|
for (const item of response.result.tree) {
|
|
if (item.path === 'versions-manifest.json') {
|
|
manifestUrl = item.url;
|
|
break;
|
|
}
|
|
}
|
|
headers['accept'] = 'application/vnd.github.VERSION.raw';
|
|
let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();
|
|
if (versionsRaw) {
|
|
// shouldn't be needed but protects against invalid json saved with BOM
|
|
versionsRaw = versionsRaw.replace(/^\uFEFF/, '');
|
|
try {
|
|
releases = JSON.parse(versionsRaw);
|
|
}
|
|
catch (_a) {
|
|
core.debug('Invalid json');
|
|
}
|
|
}
|
|
return releases;
|
|
});
|
|
}
|
|
exports.getManifestFromRepo = getManifestFromRepo;
|
|
function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
// wrap the internal impl
|
|
const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);
|
|
return match;
|
|
});
|
|
}
|
|
exports.findFromManifest = findFromManifest;
|
|
function _createExtractFolder(dest) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (!dest) {
|
|
// create a temp dir
|
|
dest = path.join(_getTempDirectory(), v4_1.default());
|
|
}
|
|
yield io.mkdirP(dest);
|
|
return dest;
|
|
});
|
|
}
|
|
function _createToolPath(tool, version, arch) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
|
|
core.debug(`destination ${folderPath}`);
|
|
const markerPath = `${folderPath}.complete`;
|
|
yield io.rmRF(folderPath);
|
|
yield io.rmRF(markerPath);
|
|
yield io.mkdirP(folderPath);
|
|
return folderPath;
|
|
});
|
|
}
|
|
function _completeToolPath(tool, version, arch) {
|
|
const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
|
|
const markerPath = `${folderPath}.complete`;
|
|
fs.writeFileSync(markerPath, '');
|
|
core.debug('finished caching tool');
|
|
}
|
|
/**
 * Check if version string is explicit
 *
 * @param versionSpec version string to check
 */
function isExplicitVersion(versionSpec) {
    const c = semver.clean(versionSpec) || '';
    core.debug(`isExplicit: ${c}`);
    const valid = semver.valid(c) != null;
    core.debug(`explicit? ${valid}`);
    return valid;
}
exports.isExplicitVersion = isExplicitVersion;
|
|
/**
|
|
 * Get the highest satisfying semantic version in `versions` which satisfies `versionSpec`
|
|
*
|
|
* @param versions array of versions to evaluate
|
|
* @param versionSpec semantic version spec to satisfy
|
|
*/
|
|
function evaluateVersions(versions, versionSpec) {
|
|
let version = '';
|
|
core.debug(`evaluating ${versions.length} versions`);
|
|
versions = versions.sort((a, b) => {
|
|
if (semver.gt(a, b)) {
|
|
return 1;
|
|
}
|
|
return -1;
|
|
});
|
|
for (let i = versions.length - 1; i >= 0; i--) {
|
|
const potential = versions[i];
|
|
const satisfied = semver.satisfies(potential, versionSpec);
|
|
if (satisfied) {
|
|
version = potential;
|
|
break;
|
|
}
|
|
}
|
|
if (version) {
|
|
core.debug(`matched: ${version}`);
|
|
}
|
|
else {
|
|
core.debug('match not found');
|
|
}
|
|
return version;
|
|
}
|
|
exports.evaluateVersions = evaluateVersions;
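// Worked example (editorial illustration only): for versions ['1.2.0', '1.10.3', '2.0.0'] and
// versionSpec '1.x', the semver sort orders them 1.2.0 < 1.10.3 < 2.0.0 and the descending scan
// returns '1.10.3', the highest version satisfying the range; a spec of '3.x' would return ''.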
|
|
/**
|
|
* Gets RUNNER_TOOL_CACHE
|
|
*/
|
|
function _getCacheDirectory() {
|
|
const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
|
|
assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
|
|
return cacheDirectory;
|
|
}
|
|
/**
|
|
* Gets RUNNER_TEMP
|
|
*/
|
|
function _getTempDirectory() {
|
|
const tempDirectory = process.env['RUNNER_TEMP'] || '';
|
|
assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
|
|
return tempDirectory;
|
|
}
|
|
/**
|
|
* Gets a global variable
|
|
*/
|
|
function _getGlobal(key, defaultValue) {
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
const value = global[key];
|
|
/* eslint-enable @typescript-eslint/no-explicit-any */
|
|
return value !== undefined ? value : defaultValue;
|
|
}
|
|
/**
|
|
* Returns an array of unique values.
|
|
* @param values Values to make unique.
|
|
*/
|
|
function _unique(values) {
|
|
return Array.from(new Set(values));
|
|
}
|
|
//# sourceMappingURL=tool-cache.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9618:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(1017)
|
|
const mkdirsSync = (__nccwpck_require__(8605).mkdirsSync)
|
|
const utimesMillisSync = (__nccwpck_require__(2548).utimesMillisSync)
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
function copySync (src, dest, opts) {
|
|
if (typeof opts === 'function') {
|
|
opts = { filter: opts }
|
|
}
|
|
|
|
opts = opts || {}
|
|
opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
|
|
opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
|
|
|
|
// Warn about using preserveTimestamps on 32-bit node
|
|
if (opts.preserveTimestamps && process.arch === 'ia32') {
|
|
process.emitWarning(
|
|
'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
|
|
'\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
|
|
'Warning', 'fs-extra-WARN0002'
|
|
)
|
|
}
|
|
|
|
const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)
|
|
stat.checkParentPathsSync(src, srcStat, dest, 'copy')
|
|
return handleFilterAndCopy(destStat, src, dest, opts)
|
|
}
|
|
|
|
function handleFilterAndCopy (destStat, src, dest, opts) {
|
|
if (opts.filter && !opts.filter(src, dest)) return
|
|
const destParent = path.dirname(dest)
|
|
if (!fs.existsSync(destParent)) mkdirsSync(destParent)
|
|
return getStats(destStat, src, dest, opts)
|
|
}
|
|
|
|
function startCopy (destStat, src, dest, opts) {
|
|
if (opts.filter && !opts.filter(src, dest)) return
|
|
return getStats(destStat, src, dest, opts)
|
|
}
|
|
|
|
function getStats (destStat, src, dest, opts) {
|
|
const statSync = opts.dereference ? fs.statSync : fs.lstatSync
|
|
const srcStat = statSync(src)
|
|
|
|
if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
|
|
else if (srcStat.isFile() ||
|
|
srcStat.isCharacterDevice() ||
|
|
srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)
|
|
else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
|
|
else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)
|
|
else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)
|
|
throw new Error(`Unknown file: ${src}`)
|
|
}
|
|
|
|
function onFile (srcStat, destStat, src, dest, opts) {
|
|
if (!destStat) return copyFile(srcStat, src, dest, opts)
|
|
return mayCopyFile(srcStat, src, dest, opts)
|
|
}
|
|
|
|
function mayCopyFile (srcStat, src, dest, opts) {
|
|
if (opts.overwrite) {
|
|
fs.unlinkSync(dest)
|
|
return copyFile(srcStat, src, dest, opts)
|
|
} else if (opts.errorOnExist) {
|
|
throw new Error(`'${dest}' already exists`)
|
|
}
|
|
}
|
|
|
|
function copyFile (srcStat, src, dest, opts) {
|
|
fs.copyFileSync(src, dest)
|
|
if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
|
|
return setDestMode(dest, srcStat.mode)
|
|
}
|
|
|
|
function handleTimestamps (srcMode, src, dest) {
|
|
// Make sure the file is writable before setting the timestamp
|
|
// otherwise open fails with EPERM when invoked with 'r+'
|
|
// (through utimes call)
|
|
if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
|
|
return setDestTimestamps(src, dest)
|
|
}
|
|
|
|
function fileIsNotWritable (srcMode) {
|
|
return (srcMode & 0o200) === 0
|
|
}
|
|
|
|
function makeFileWritable (dest, srcMode) {
|
|
return setDestMode(dest, srcMode | 0o200)
|
|
}
|
|
|
|
function setDestMode (dest, srcMode) {
|
|
return fs.chmodSync(dest, srcMode)
|
|
}
|
|
|
|
function setDestTimestamps (src, dest) {
|
|
// The initial srcStat.atime cannot be trusted
|
|
// because it is modified by the read(2) system call
|
|
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
|
const updatedSrcStat = fs.statSync(src)
|
|
return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
|
|
}
|
|
|
|
function onDir (srcStat, destStat, src, dest, opts) {
|
|
if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
|
|
return copyDir(src, dest, opts)
|
|
}
|
|
|
|
function mkDirAndCopy (srcMode, src, dest, opts) {
|
|
fs.mkdirSync(dest)
|
|
copyDir(src, dest, opts)
|
|
return setDestMode(dest, srcMode)
|
|
}
|
|
|
|
function copyDir (src, dest, opts) {
|
|
fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts))
|
|
}
|
|
|
|
function copyDirItem (item, src, dest, opts) {
|
|
const srcItem = path.join(src, item)
|
|
const destItem = path.join(dest, item)
|
|
const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)
|
|
return startCopy(destStat, srcItem, destItem, opts)
|
|
}
|
|
|
|
function onLink (destStat, src, dest, opts) {
|
|
let resolvedSrc = fs.readlinkSync(src)
|
|
if (opts.dereference) {
|
|
resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
|
|
}
|
|
|
|
if (!destStat) {
|
|
return fs.symlinkSync(resolvedSrc, dest)
|
|
} else {
|
|
let resolvedDest
|
|
try {
|
|
resolvedDest = fs.readlinkSync(dest)
|
|
} catch (err) {
|
|
// dest exists and is a regular file or directory,
|
|
// Windows may throw UNKNOWN error. If dest already exists,
|
|
// fs throws error anyway, so no need to guard against it here.
|
|
if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)
|
|
throw err
|
|
}
|
|
if (opts.dereference) {
|
|
resolvedDest = path.resolve(process.cwd(), resolvedDest)
|
|
}
|
|
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
|
|
throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
|
|
}
|
|
|
|
// prevent copy if src is a subdir of dest since unlinking
|
|
// dest in this case would result in removing src contents
|
|
// and therefore a broken symlink would be created.
|
|
if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
|
|
throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
|
|
}
|
|
return copyLink(resolvedSrc, dest)
|
|
}
|
|
}
|
|
|
|
function copyLink (resolvedSrc, dest) {
|
|
fs.unlinkSync(dest)
|
|
return fs.symlinkSync(resolvedSrc, dest)
|
|
}
|
|
|
|
module.exports = copySync
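// Usage sketch (editorial illustration only; placeholder paths). copySync() is fs-extra's
// synchronous copy; overwrite defaults to true via the legacy clobber option, and filter can
// prune entries:
//
//   copySync('/opt/src-tree', '/opt/dest-tree', {
//       overwrite: false,
//       errorOnExist: true,
//       filter: src => !src.endsWith('.log')   // skip log files
//   });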
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8834:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(1017)
|
|
const mkdirs = (__nccwpck_require__(8605).mkdirs)
|
|
const pathExists = (__nccwpck_require__(3835).pathExists)
|
|
const utimesMillis = (__nccwpck_require__(2548).utimesMillis)
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
function copy (src, dest, opts, cb) {
|
|
if (typeof opts === 'function' && !cb) {
|
|
cb = opts
|
|
opts = {}
|
|
} else if (typeof opts === 'function') {
|
|
opts = { filter: opts }
|
|
}
|
|
|
|
cb = cb || function () {}
|
|
opts = opts || {}
|
|
|
|
opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
|
|
opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
|
|
|
|
// Warn about using preserveTimestamps on 32-bit node
|
|
if (opts.preserveTimestamps && process.arch === 'ia32') {
|
|
process.emitWarning(
|
|
'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
|
|
'\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
|
|
'Warning', 'fs-extra-WARN0001'
|
|
)
|
|
}
|
|
|
|
stat.checkPaths(src, dest, 'copy', opts, (err, stats) => {
|
|
if (err) return cb(err)
|
|
const { srcStat, destStat } = stats
|
|
stat.checkParentPaths(src, srcStat, dest, 'copy', err => {
|
|
if (err) return cb(err)
|
|
if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)
|
|
return checkParentDir(destStat, src, dest, opts, cb)
|
|
})
|
|
})
|
|
}
|
|
|
|
function checkParentDir (destStat, src, dest, opts, cb) {
|
|
const destParent = path.dirname(dest)
|
|
pathExists(destParent, (err, dirExists) => {
|
|
if (err) return cb(err)
|
|
if (dirExists) return getStats(destStat, src, dest, opts, cb)
|
|
mkdirs(destParent, err => {
|
|
if (err) return cb(err)
|
|
return getStats(destStat, src, dest, opts, cb)
|
|
})
|
|
})
|
|
}
|
|
|
|
function handleFilter (onInclude, destStat, src, dest, opts, cb) {
|
|
Promise.resolve(opts.filter(src, dest)).then(include => {
|
|
if (include) return onInclude(destStat, src, dest, opts, cb)
|
|
return cb()
|
|
}, error => cb(error))
|
|
}
|
|
|
|
function startCopy (destStat, src, dest, opts, cb) {
|
|
if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb)
|
|
return getStats(destStat, src, dest, opts, cb)
|
|
}
|
|
|
|
function getStats (destStat, src, dest, opts, cb) {
|
|
const stat = opts.dereference ? fs.stat : fs.lstat
|
|
stat(src, (err, srcStat) => {
|
|
if (err) return cb(err)
|
|
|
|
if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)
|
|
else if (srcStat.isFile() ||
|
|
srcStat.isCharacterDevice() ||
|
|
srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)
|
|
else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)
|
|
else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`))
|
|
else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`))
|
|
return cb(new Error(`Unknown file: ${src}`))
|
|
})
|
|
}
|
|
|
|
function onFile (srcStat, destStat, src, dest, opts, cb) {
|
|
if (!destStat) return copyFile(srcStat, src, dest, opts, cb)
|
|
return mayCopyFile(srcStat, src, dest, opts, cb)
|
|
}
|
|
|
|
function mayCopyFile (srcStat, src, dest, opts, cb) {
|
|
if (opts.overwrite) {
|
|
fs.unlink(dest, err => {
|
|
if (err) return cb(err)
|
|
return copyFile(srcStat, src, dest, opts, cb)
|
|
})
|
|
} else if (opts.errorOnExist) {
|
|
return cb(new Error(`'${dest}' already exists`))
|
|
} else return cb()
|
|
}
|
|
|
|
function copyFile (srcStat, src, dest, opts, cb) {
|
|
fs.copyFile(src, dest, err => {
|
|
if (err) return cb(err)
|
|
if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)
|
|
return setDestMode(dest, srcStat.mode, cb)
|
|
})
|
|
}
|
|
|
|
function handleTimestampsAndMode (srcMode, src, dest, cb) {
|
|
// Make sure the file is writable before setting the timestamp
|
|
// otherwise open fails with EPERM when invoked with 'r+'
|
|
// (through utimes call)
|
|
if (fileIsNotWritable(srcMode)) {
|
|
return makeFileWritable(dest, srcMode, err => {
|
|
if (err) return cb(err)
|
|
return setDestTimestampsAndMode(srcMode, src, dest, cb)
|
|
})
|
|
}
|
|
return setDestTimestampsAndMode(srcMode, src, dest, cb)
|
|
}
|
|
|
|
function fileIsNotWritable (srcMode) {
|
|
return (srcMode & 0o200) === 0
|
|
}
|
|
|
|
function makeFileWritable (dest, srcMode, cb) {
|
|
return setDestMode(dest, srcMode | 0o200, cb)
|
|
}
|
|
|
|
function setDestTimestampsAndMode (srcMode, src, dest, cb) {
|
|
setDestTimestamps(src, dest, err => {
|
|
if (err) return cb(err)
|
|
return setDestMode(dest, srcMode, cb)
|
|
})
|
|
}
|
|
|
|
function setDestMode (dest, srcMode, cb) {
|
|
return fs.chmod(dest, srcMode, cb)
|
|
}
|
|
|
|
function setDestTimestamps (src, dest, cb) {
|
|
// The initial srcStat.atime cannot be trusted
|
|
// because it is modified by the read(2) system call
|
|
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
|
fs.stat(src, (err, updatedSrcStat) => {
|
|
if (err) return cb(err)
|
|
return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)
|
|
})
|
|
}
|
|
|
|
function onDir (srcStat, destStat, src, dest, opts, cb) {
|
|
if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)
|
|
return copyDir(src, dest, opts, cb)
|
|
}
|
|
|
|
function mkDirAndCopy (srcMode, src, dest, opts, cb) {
|
|
fs.mkdir(dest, err => {
|
|
if (err) return cb(err)
|
|
copyDir(src, dest, opts, err => {
|
|
if (err) return cb(err)
|
|
return setDestMode(dest, srcMode, cb)
|
|
})
|
|
})
|
|
}
|
|
|
|
function copyDir (src, dest, opts, cb) {
|
|
fs.readdir(src, (err, items) => {
|
|
if (err) return cb(err)
|
|
return copyDirItems(items, src, dest, opts, cb)
|
|
})
|
|
}
|
|
|
|
function copyDirItems (items, src, dest, opts, cb) {
|
|
const item = items.pop()
|
|
if (!item) return cb()
|
|
return copyDirItem(items, item, src, dest, opts, cb)
|
|
}
|
|
|
|
function copyDirItem (items, item, src, dest, opts, cb) {
|
|
const srcItem = path.join(src, item)
|
|
const destItem = path.join(dest, item)
|
|
stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => {
|
|
if (err) return cb(err)
|
|
const { destStat } = stats
|
|
startCopy(destStat, srcItem, destItem, opts, err => {
|
|
if (err) return cb(err)
|
|
return copyDirItems(items, src, dest, opts, cb)
|
|
})
|
|
})
|
|
}
|
|
|
|
function onLink (destStat, src, dest, opts, cb) {
|
|
fs.readlink(src, (err, resolvedSrc) => {
|
|
if (err) return cb(err)
|
|
if (opts.dereference) {
|
|
resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
|
|
}
|
|
|
|
if (!destStat) {
|
|
return fs.symlink(resolvedSrc, dest, cb)
|
|
} else {
|
|
fs.readlink(dest, (err, resolvedDest) => {
|
|
if (err) {
|
|
// dest exists and is a regular file or directory,
|
|
// Windows may throw UNKNOWN error. If dest already exists,
|
|
// fs throws error anyway, so no need to guard against it here.
|
|
if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)
|
|
return cb(err)
|
|
}
|
|
if (opts.dereference) {
|
|
resolvedDest = path.resolve(process.cwd(), resolvedDest)
|
|
}
|
|
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
|
|
return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))
|
|
}
|
|
|
|
// do not copy if src is a subdir of dest since unlinking
|
|
// dest in this case would result in removing src contents
|
|
// and therefore a broken symlink would be created.
|
|
if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
|
|
return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))
|
|
}
|
|
return copyLink(resolvedSrc, dest, cb)
|
|
})
|
|
}
|
|
})
|
|
}
|
|
|
|
function copyLink (resolvedSrc, dest, cb) {
|
|
fs.unlink(dest, err => {
|
|
if (err) return cb(err)
|
|
return fs.symlink(resolvedSrc, dest, cb)
|
|
})
|
|
}
|
|
|
|
module.exports = copy
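// Illustrative usage sketch (comment only; not executed as part of the bundle).
// The paths and filter below are hypothetical. Once wrapped by the callback/promise
// adapter in module 1335, the async `copy` above is typically consumed like this:
//
//   // overwrite defaults to true (falling back to the legacy `clobber` flag);
//   // the filter may return a boolean or a Promise<boolean> for each (src, dest) pair.
//   await copy('/tmp/src', '/tmp/dest', {
//     overwrite: true,
//     filter: (src, dest) => !src.endsWith('.log')
//   })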
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1335:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
module.exports = {
|
|
copy: u(__nccwpck_require__(8834)),
|
|
copySync: __nccwpck_require__(9618)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6970:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromPromise)
|
|
const fs = __nccwpck_require__(1176)
|
|
const path = __nccwpck_require__(1017)
|
|
const mkdir = __nccwpck_require__(8605)
|
|
const remove = __nccwpck_require__(7357)
|
|
|
|
const emptyDir = u(async function emptyDir (dir) {
|
|
let items
|
|
try {
|
|
items = await fs.readdir(dir)
|
|
} catch {
|
|
return mkdir.mkdirs(dir)
|
|
}
|
|
|
|
return Promise.all(items.map(item => remove.remove(path.join(dir, item))))
|
|
})
|
|
|
|
function emptyDirSync (dir) {
|
|
let items
|
|
try {
|
|
items = fs.readdirSync(dir)
|
|
} catch {
|
|
return mkdir.mkdirsSync(dir)
|
|
}
|
|
|
|
items.forEach(item => {
|
|
item = path.join(dir, item)
|
|
remove.removeSync(item)
|
|
})
|
|
}
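// Illustrative usage sketch (comment only; not executed as part of the bundle).
// emptyDir/emptyDirSync above either create the directory when readdir fails
// (treated as "directory missing") or delete every entry inside it. A hypothetical call:
//
//   await emptyDir('/tmp/build')   // '/tmp/build' now exists and is empty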
|
|
|
|
module.exports = {
|
|
emptyDirSync,
|
|
emptydirSync: emptyDirSync,
|
|
emptyDir,
|
|
emptydir: emptyDir
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2164:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
const path = __nccwpck_require__(1017)
|
|
const fs = __nccwpck_require__(7758)
|
|
const mkdir = __nccwpck_require__(8605)
|
|
|
|
function createFile (file, callback) {
|
|
function makeFile () {
|
|
fs.writeFile(file, '', err => {
|
|
if (err) return callback(err)
|
|
callback()
|
|
})
|
|
}
|
|
|
|
fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err
|
|
if (!err && stats.isFile()) return callback()
|
|
const dir = path.dirname(file)
|
|
fs.stat(dir, (err, stats) => {
|
|
if (err) {
|
|
// if the directory doesn't exist, make it
|
|
if (err.code === 'ENOENT') {
|
|
return mkdir.mkdirs(dir, err => {
|
|
if (err) return callback(err)
|
|
makeFile()
|
|
})
|
|
}
|
|
return callback(err)
|
|
}
|
|
|
|
if (stats.isDirectory()) makeFile()
|
|
else {
|
|
// parent is not a directory
|
|
// This is just to cause an internal ENOTDIR error to be thrown
|
|
fs.readdir(dir, err => {
|
|
if (err) return callback(err)
|
|
})
|
|
}
|
|
})
|
|
})
|
|
}
|
|
|
|
function createFileSync (file) {
|
|
let stats
|
|
try {
|
|
stats = fs.statSync(file)
|
|
} catch {}
|
|
if (stats && stats.isFile()) return
|
|
|
|
const dir = path.dirname(file)
|
|
try {
|
|
if (!fs.statSync(dir).isDirectory()) {
|
|
// parent is not a directory
|
|
// This is just to cause an internal ENOTDIR error to be thrown
|
|
fs.readdirSync(dir)
|
|
}
|
|
} catch (err) {
|
|
// If the stat call above failed because the directory doesn't exist, create it
|
|
if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
|
|
else throw err
|
|
}
|
|
|
|
fs.writeFileSync(file, '')
|
|
}
|
|
|
|
module.exports = {
|
|
createFile: u(createFile),
|
|
createFileSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 55:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const { createFile, createFileSync } = __nccwpck_require__(2164)
|
|
const { createLink, createLinkSync } = __nccwpck_require__(3797)
|
|
const { createSymlink, createSymlinkSync } = __nccwpck_require__(2549)
|
|
|
|
module.exports = {
|
|
// file
|
|
createFile,
|
|
createFileSync,
|
|
ensureFile: createFile,
|
|
ensureFileSync: createFileSync,
|
|
// link
|
|
createLink,
|
|
createLinkSync,
|
|
ensureLink: createLink,
|
|
ensureLinkSync: createLinkSync,
|
|
// symlink
|
|
createSymlink,
|
|
createSymlinkSync,
|
|
ensureSymlink: createSymlink,
|
|
ensureSymlinkSync: createSymlinkSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3797:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
const path = __nccwpck_require__(1017)
|
|
const fs = __nccwpck_require__(7758)
|
|
const mkdir = __nccwpck_require__(8605)
|
|
const pathExists = (__nccwpck_require__(3835).pathExists)
|
|
const { areIdentical } = __nccwpck_require__(3901)
|
|
|
|
function createLink (srcpath, dstpath, callback) {
|
|
function makeLink (srcpath, dstpath) {
|
|
fs.link(srcpath, dstpath, err => {
|
|
if (err) return callback(err)
|
|
callback(null)
|
|
})
|
|
}
|
|
|
|
fs.lstat(dstpath, (_, dstStat) => {
|
|
fs.lstat(srcpath, (err, srcStat) => {
|
|
if (err) {
|
|
err.message = err.message.replace('lstat', 'ensureLink')
|
|
return callback(err)
|
|
}
|
|
if (dstStat && areIdentical(srcStat, dstStat)) return callback(null)
|
|
|
|
const dir = path.dirname(dstpath)
|
|
pathExists(dir, (err, dirExists) => {
|
|
if (err) return callback(err)
|
|
if (dirExists) return makeLink(srcpath, dstpath)
|
|
mkdir.mkdirs(dir, err => {
|
|
if (err) return callback(err)
|
|
makeLink(srcpath, dstpath)
|
|
})
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
function createLinkSync (srcpath, dstpath) {
|
|
let dstStat
|
|
try {
|
|
dstStat = fs.lstatSync(dstpath)
|
|
} catch {}
|
|
|
|
try {
|
|
const srcStat = fs.lstatSync(srcpath)
|
|
if (dstStat && areIdentical(srcStat, dstStat)) return
|
|
} catch (err) {
|
|
err.message = err.message.replace('lstat', 'ensureLink')
|
|
throw err
|
|
}
|
|
|
|
const dir = path.dirname(dstpath)
|
|
const dirExists = fs.existsSync(dir)
|
|
if (dirExists) return fs.linkSync(srcpath, dstpath)
|
|
mkdir.mkdirsSync(dir)
|
|
|
|
return fs.linkSync(srcpath, dstpath)
|
|
}
|
|
|
|
module.exports = {
|
|
createLink: u(createLink),
|
|
createLinkSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3727:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const path = __nccwpck_require__(1017)
|
|
const fs = __nccwpck_require__(7758)
|
|
const pathExists = (__nccwpck_require__(3835).pathExists)
|
|
|
|
/**
|
|
* Function that returns two types of paths, one relative to symlink, and one
|
|
* relative to the current working directory. Checks if path is absolute or
|
|
* relative. If the path is relative, this function checks if the path is
|
|
* relative to symlink or relative to current working directory. This is an
|
|
* attempt to find a smarter `srcpath` to supply when building symlinks.
|
|
* This allows you to determine which path to use out of one of three possible
|
|
* types of source paths. The first is an absolute path. This is detected by
|
|
* `path.isAbsolute()`. When an absolute path is provided, it is checked to
|
|
* see if it exists. If it does, it's used; if not, an error is returned
|
|
* (callback) or thrown (sync). The other two options for `srcpath` are a
|
|
* relative path. By default Node's `fs.symlink` works by creating a symlink
|
|
* using `dstpath` and expects the `srcpath` to be relative to the newly
|
|
* created symlink. If you provide a `srcpath` that does not exist on the file
|
|
* system it results in a broken symlink. To minimize this, the function
|
|
* checks to see if the 'relative to symlink' source file exists, and if it
|
|
* does it will use it. If it does not, it checks if there's a file that
|
|
* exists that is relative to the current working directory; if it does, it's used.
|
|
* This preserves the expectations of the original fs.symlink spec and adds
|
|
* the ability to pass in `relative to current working directory` paths.
|
|
*/
|
|
|
|
function symlinkPaths (srcpath, dstpath, callback) {
|
|
if (path.isAbsolute(srcpath)) {
|
|
return fs.lstat(srcpath, (err) => {
|
|
if (err) {
|
|
err.message = err.message.replace('lstat', 'ensureSymlink')
|
|
return callback(err)
|
|
}
|
|
return callback(null, {
|
|
toCwd: srcpath,
|
|
toDst: srcpath
|
|
})
|
|
})
|
|
} else {
|
|
const dstdir = path.dirname(dstpath)
|
|
const relativeToDst = path.join(dstdir, srcpath)
|
|
return pathExists(relativeToDst, (err, exists) => {
|
|
if (err) return callback(err)
|
|
if (exists) {
|
|
return callback(null, {
|
|
toCwd: relativeToDst,
|
|
toDst: srcpath
|
|
})
|
|
} else {
|
|
return fs.lstat(srcpath, (err) => {
|
|
if (err) {
|
|
err.message = err.message.replace('lstat', 'ensureSymlink')
|
|
return callback(err)
|
|
}
|
|
return callback(null, {
|
|
toCwd: srcpath,
|
|
toDst: path.relative(dstdir, srcpath)
|
|
})
|
|
})
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
function symlinkPathsSync (srcpath, dstpath) {
|
|
let exists
|
|
if (path.isAbsolute(srcpath)) {
|
|
exists = fs.existsSync(srcpath)
|
|
if (!exists) throw new Error('absolute srcpath does not exist')
|
|
return {
|
|
toCwd: srcpath,
|
|
toDst: srcpath
|
|
}
|
|
} else {
|
|
const dstdir = path.dirname(dstpath)
|
|
const relativeToDst = path.join(dstdir, srcpath)
|
|
exists = fs.existsSync(relativeToDst)
|
|
if (exists) {
|
|
return {
|
|
toCwd: relativeToDst,
|
|
toDst: srcpath
|
|
}
|
|
} else {
|
|
exists = fs.existsSync(srcpath)
|
|
if (!exists) throw new Error('relative srcpath does not exist')
|
|
return {
|
|
toCwd: srcpath,
|
|
toDst: path.relative(dstdir, srcpath)
|
|
}
|
|
}
|
|
}
|
|
}
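// Illustrative sketch (comment only), assuming a hypothetical layout in which
// './target.txt' exists next to the symlink being created. It shows the two
// spellings of the source that the functions above return: one for existence
// checks from the current working directory, one for the link target itself.
//
//   symlinkPathsSync('./target.txt', '/data/links/alias')
//   // => { toCwd: '/data/links/target.txt',   // joined with the dst dir and checked
//   //      toDst: './target.txt' }            // what the created symlink points at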
|
|
|
|
module.exports = {
|
|
symlinkPaths,
|
|
symlinkPathsSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8254:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
|
|
function symlinkType (srcpath, type, callback) {
|
|
callback = (typeof type === 'function') ? type : callback
|
|
type = (typeof type === 'function') ? false : type
|
|
if (type) return callback(null, type)
|
|
fs.lstat(srcpath, (err, stats) => {
|
|
if (err) return callback(null, 'file')
|
|
type = (stats && stats.isDirectory()) ? 'dir' : 'file'
|
|
callback(null, type)
|
|
})
|
|
}
|
|
|
|
function symlinkTypeSync (srcpath, type) {
|
|
let stats
|
|
|
|
if (type) return type
|
|
try {
|
|
stats = fs.lstatSync(srcpath)
|
|
} catch {
|
|
return 'file'
|
|
}
|
|
return (stats && stats.isDirectory()) ? 'dir' : 'file'
|
|
}
|
|
|
|
module.exports = {
|
|
symlinkType,
|
|
symlinkTypeSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2549:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
const path = __nccwpck_require__(1017)
|
|
const fs = __nccwpck_require__(1176)
|
|
const _mkdirs = __nccwpck_require__(8605)
|
|
const mkdirs = _mkdirs.mkdirs
|
|
const mkdirsSync = _mkdirs.mkdirsSync
|
|
|
|
const _symlinkPaths = __nccwpck_require__(3727)
|
|
const symlinkPaths = _symlinkPaths.symlinkPaths
|
|
const symlinkPathsSync = _symlinkPaths.symlinkPathsSync
|
|
|
|
const _symlinkType = __nccwpck_require__(8254)
|
|
const symlinkType = _symlinkType.symlinkType
|
|
const symlinkTypeSync = _symlinkType.symlinkTypeSync
|
|
|
|
const pathExists = (__nccwpck_require__(3835).pathExists)
|
|
|
|
const { areIdentical } = __nccwpck_require__(3901)
|
|
|
|
function createSymlink (srcpath, dstpath, type, callback) {
|
|
callback = (typeof type === 'function') ? type : callback
|
|
type = (typeof type === 'function') ? false : type
|
|
|
|
fs.lstat(dstpath, (err, stats) => {
|
|
if (!err && stats.isSymbolicLink()) {
|
|
Promise.all([
|
|
fs.stat(srcpath),
|
|
fs.stat(dstpath)
|
|
]).then(([srcStat, dstStat]) => {
|
|
if (areIdentical(srcStat, dstStat)) return callback(null)
|
|
_createSymlink(srcpath, dstpath, type, callback)
|
|
})
|
|
} else _createSymlink(srcpath, dstpath, type, callback)
|
|
})
|
|
}
|
|
|
|
function _createSymlink (srcpath, dstpath, type, callback) {
|
|
symlinkPaths(srcpath, dstpath, (err, relative) => {
|
|
if (err) return callback(err)
|
|
srcpath = relative.toDst
|
|
symlinkType(relative.toCwd, type, (err, type) => {
|
|
if (err) return callback(err)
|
|
const dir = path.dirname(dstpath)
|
|
pathExists(dir, (err, dirExists) => {
|
|
if (err) return callback(err)
|
|
if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)
|
|
mkdirs(dir, err => {
|
|
if (err) return callback(err)
|
|
fs.symlink(srcpath, dstpath, type, callback)
|
|
})
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
function createSymlinkSync (srcpath, dstpath, type) {
|
|
let stats
|
|
try {
|
|
stats = fs.lstatSync(dstpath)
|
|
} catch {}
|
|
if (stats && stats.isSymbolicLink()) {
|
|
const srcStat = fs.statSync(srcpath)
|
|
const dstStat = fs.statSync(dstpath)
|
|
if (areIdentical(srcStat, dstStat)) return
|
|
}
|
|
|
|
const relative = symlinkPathsSync(srcpath, dstpath)
|
|
srcpath = relative.toDst
|
|
type = symlinkTypeSync(relative.toCwd, type)
|
|
const dir = path.dirname(dstpath)
|
|
const exists = fs.existsSync(dir)
|
|
if (exists) return fs.symlinkSync(srcpath, dstpath, type)
|
|
mkdirsSync(dir)
|
|
return fs.symlinkSync(srcpath, dstpath, type)
|
|
}
|
|
|
|
module.exports = {
|
|
createSymlink: u(createSymlink),
|
|
createSymlinkSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1176:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
// This is adapted from https://github.com/normalize/mz
|
|
// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
const fs = __nccwpck_require__(7758)
|
|
|
|
const api = [
|
|
'access',
|
|
'appendFile',
|
|
'chmod',
|
|
'chown',
|
|
'close',
|
|
'copyFile',
|
|
'fchmod',
|
|
'fchown',
|
|
'fdatasync',
|
|
'fstat',
|
|
'fsync',
|
|
'ftruncate',
|
|
'futimes',
|
|
'lchmod',
|
|
'lchown',
|
|
'link',
|
|
'lstat',
|
|
'mkdir',
|
|
'mkdtemp',
|
|
'open',
|
|
'opendir',
|
|
'readdir',
|
|
'readFile',
|
|
'readlink',
|
|
'realpath',
|
|
'rename',
|
|
'rm',
|
|
'rmdir',
|
|
'stat',
|
|
'symlink',
|
|
'truncate',
|
|
'unlink',
|
|
'utimes',
|
|
'writeFile'
|
|
].filter(key => {
|
|
// Some commands are not available on some systems. Ex:
|
|
// fs.opendir was added in Node.js v12.12.0
|
|
// fs.rm was added in Node.js v14.14.0
|
|
// fs.lchown is not available on at least some Linux
|
|
return typeof fs[key] === 'function'
|
|
})
|
|
|
|
// Export cloned fs:
|
|
Object.assign(exports, fs)
|
|
|
|
// Universalify async methods:
|
|
api.forEach(method => {
|
|
exports[method] = u(fs[method])
|
|
})
|
|
|
|
// We differ from mz/fs in that we still ship the old, broken, fs.exists()
|
|
// since we are a drop-in replacement for the native module
|
|
exports.exists = function (filename, callback) {
|
|
if (typeof callback === 'function') {
|
|
return fs.exists(filename, callback)
|
|
}
|
|
return new Promise(resolve => {
|
|
return fs.exists(filename, resolve)
|
|
})
|
|
}
|
|
|
|
// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args
|
|
|
|
exports.read = function (fd, buffer, offset, length, position, callback) {
|
|
if (typeof callback === 'function') {
|
|
return fs.read(fd, buffer, offset, length, position, callback)
|
|
}
|
|
return new Promise((resolve, reject) => {
|
|
fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {
|
|
if (err) return reject(err)
|
|
resolve({ bytesRead, buffer })
|
|
})
|
|
})
|
|
}
|
|
|
|
// Function signature can be
|
|
// fs.write(fd, buffer[, offset[, length[, position]]], callback)
|
|
// OR
|
|
// fs.write(fd, string[, position[, encoding]], callback)
|
|
// We need to handle both cases, so we use ...args
|
|
exports.write = function (fd, buffer, ...args) {
|
|
if (typeof args[args.length - 1] === 'function') {
|
|
return fs.write(fd, buffer, ...args)
|
|
}
|
|
|
|
return new Promise((resolve, reject) => {
|
|
fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {
|
|
if (err) return reject(err)
|
|
resolve({ bytesWritten, buffer })
|
|
})
|
|
})
|
|
}
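// Illustrative sketch (comment only; `fd` is a hypothetical open file descriptor).
// When no callback is passed, the wrapped read/write above resolve with a single
// object instead of the multiple callback arguments:
//
//   const { bytesWritten, buffer } = await exports.write(fd, Buffer.from('hi'))
//   const { bytesRead } = await exports.read(fd, Buffer.alloc(2), 0, 2, 0)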
|
|
|
|
// fs.writev only available in Node v12.9.0+
|
|
if (typeof fs.writev === 'function') {
|
|
// Function signature is
|
|
// fs.writev(fd, buffers[, position], callback)
|
|
// We need to handle the optional arg, so we use ...args
|
|
exports.writev = function (fd, buffers, ...args) {
|
|
if (typeof args[args.length - 1] === 'function') {
|
|
return fs.writev(fd, buffers, ...args)
|
|
}
|
|
|
|
return new Promise((resolve, reject) => {
|
|
fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
|
|
if (err) return reject(err)
|
|
resolve({ bytesWritten, buffers })
|
|
})
|
|
})
|
|
}
|
|
}
|
|
|
|
// fs.realpath.native sometimes not available if fs is monkey-patched
|
|
if (typeof fs.realpath.native === 'function') {
|
|
exports.realpath.native = u(fs.realpath.native)
|
|
} else {
|
|
process.emitWarning(
|
|
'fs.realpath.native is not a function. Is fs being monkey-patched?',
|
|
'Warning', 'fs-extra-WARN0003'
|
|
)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5630:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = {
|
|
// Export promisified graceful-fs:
|
|
...__nccwpck_require__(1176),
|
|
// Export extra methods:
|
|
...__nccwpck_require__(1335),
|
|
...__nccwpck_require__(6970),
|
|
...__nccwpck_require__(55),
|
|
...__nccwpck_require__(213),
|
|
...__nccwpck_require__(8605),
|
|
...__nccwpck_require__(1497),
|
|
...__nccwpck_require__(1832),
|
|
...__nccwpck_require__(3835),
|
|
...__nccwpck_require__(7357)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 213:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromPromise)
|
|
const jsonFile = __nccwpck_require__(8970)
|
|
|
|
jsonFile.outputJson = u(__nccwpck_require__(531))
|
|
jsonFile.outputJsonSync = __nccwpck_require__(9421)
|
|
// aliases
|
|
jsonFile.outputJSON = jsonFile.outputJson
|
|
jsonFile.outputJSONSync = jsonFile.outputJsonSync
|
|
jsonFile.writeJSON = jsonFile.writeJson
|
|
jsonFile.writeJSONSync = jsonFile.writeJsonSync
|
|
jsonFile.readJSON = jsonFile.readJson
|
|
jsonFile.readJSONSync = jsonFile.readJsonSync
|
|
|
|
module.exports = jsonFile
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8970:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const jsonFile = __nccwpck_require__(6160)
|
|
|
|
module.exports = {
|
|
// jsonfile exports
|
|
readJson: jsonFile.readFile,
|
|
readJsonSync: jsonFile.readFileSync,
|
|
writeJson: jsonFile.writeFile,
|
|
writeJsonSync: jsonFile.writeFileSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9421:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const { stringify } = __nccwpck_require__(5902)
|
|
const { outputFileSync } = __nccwpck_require__(1832)
|
|
|
|
function outputJsonSync (file, data, options) {
|
|
const str = stringify(data, options)
|
|
|
|
outputFileSync(file, str, options)
|
|
}
|
|
|
|
module.exports = outputJsonSync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 531:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const { stringify } = __nccwpck_require__(5902)
|
|
const { outputFile } = __nccwpck_require__(1832)
|
|
|
|
async function outputJson (file, data, options = {}) {
|
|
const str = stringify(data, options)
|
|
|
|
await outputFile(file, str, options)
|
|
}
|
|
|
|
module.exports = outputJson
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8605:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
const u = (__nccwpck_require__(9046).fromPromise)
|
|
const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751)
|
|
const makeDir = u(_makeDir)
|
|
|
|
module.exports = {
|
|
mkdirs: makeDir,
|
|
mkdirsSync: makeDirSync,
|
|
// alias
|
|
mkdirp: makeDir,
|
|
mkdirpSync: makeDirSync,
|
|
ensureDir: makeDir,
|
|
ensureDirSync: makeDirSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2751:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
const fs = __nccwpck_require__(1176)
|
|
const { checkPath } = __nccwpck_require__(9907)
|
|
|
|
const getMode = options => {
|
|
const defaults = { mode: 0o777 }
|
|
if (typeof options === 'number') return options
|
|
return ({ ...defaults, ...options }).mode
|
|
}
|
|
|
|
module.exports.makeDir = async (dir, options) => {
|
|
checkPath(dir)
|
|
|
|
return fs.mkdir(dir, {
|
|
mode: getMode(options),
|
|
recursive: true
|
|
})
|
|
}
|
|
|
|
module.exports.makeDirSync = (dir, options) => {
|
|
checkPath(dir)
|
|
|
|
return fs.mkdirSync(dir, {
|
|
mode: getMode(options),
|
|
recursive: true
|
|
})
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9907:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
// Adapted from https://github.com/sindresorhus/make-dir
|
|
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
|
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
|
|
const path = __nccwpck_require__(1017)
|
|
|
|
// https://github.com/nodejs/node/issues/8987
|
|
// https://github.com/libuv/libuv/pull/1088
|
|
module.exports.checkPath = function checkPath (pth) {
|
|
if (process.platform === 'win32') {
|
|
const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''))
|
|
|
|
if (pathHasInvalidWinCharacters) {
|
|
const error = new Error(`Path contains invalid characters: ${pth}`)
|
|
error.code = 'EINVAL'
|
|
throw error
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1497:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
module.exports = {
|
|
move: u(__nccwpck_require__(2231)),
|
|
moveSync: __nccwpck_require__(2047)
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2047:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(1017)
|
|
const copySync = (__nccwpck_require__(1335).copySync)
|
|
const removeSync = (__nccwpck_require__(7357).removeSync)
|
|
const mkdirpSync = (__nccwpck_require__(8605).mkdirpSync)
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
function moveSync (src, dest, opts) {
|
|
opts = opts || {}
|
|
const overwrite = opts.overwrite || opts.clobber || false
|
|
|
|
const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)
|
|
stat.checkParentPathsSync(src, srcStat, dest, 'move')
|
|
if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))
|
|
return doRename(src, dest, overwrite, isChangingCase)
|
|
}
|
|
|
|
function isParentRoot (dest) {
|
|
const parent = path.dirname(dest)
|
|
const parsedPath = path.parse(parent)
|
|
return parsedPath.root === parent
|
|
}
|
|
|
|
function doRename (src, dest, overwrite, isChangingCase) {
|
|
if (isChangingCase) return rename(src, dest, overwrite)
|
|
if (overwrite) {
|
|
removeSync(dest)
|
|
return rename(src, dest, overwrite)
|
|
}
|
|
if (fs.existsSync(dest)) throw new Error('dest already exists.')
|
|
return rename(src, dest, overwrite)
|
|
}
|
|
|
|
function rename (src, dest, overwrite) {
|
|
try {
|
|
fs.renameSync(src, dest)
|
|
} catch (err) {
|
|
if (err.code !== 'EXDEV') throw err
|
|
return moveAcrossDevice(src, dest, overwrite)
|
|
}
|
|
}
|
|
|
|
function moveAcrossDevice (src, dest, overwrite) {
|
|
const opts = {
|
|
overwrite,
|
|
errorOnExist: true
|
|
}
|
|
copySync(src, dest, opts)
|
|
return removeSync(src)
|
|
}
|
|
|
|
module.exports = moveSync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2231:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(1017)
|
|
const copy = (__nccwpck_require__(1335).copy)
|
|
const remove = (__nccwpck_require__(7357).remove)
|
|
const mkdirp = (__nccwpck_require__(8605).mkdirp)
|
|
const pathExists = (__nccwpck_require__(3835).pathExists)
|
|
const stat = __nccwpck_require__(3901)
|
|
|
|
function move (src, dest, opts, cb) {
|
|
if (typeof opts === 'function') {
|
|
cb = opts
|
|
opts = {}
|
|
}
|
|
|
|
opts = opts || {}
|
|
|
|
const overwrite = opts.overwrite || opts.clobber || false
|
|
|
|
stat.checkPaths(src, dest, 'move', opts, (err, stats) => {
|
|
if (err) return cb(err)
|
|
const { srcStat, isChangingCase = false } = stats
|
|
stat.checkParentPaths(src, srcStat, dest, 'move', err => {
|
|
if (err) return cb(err)
|
|
if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb)
|
|
mkdirp(path.dirname(dest), err => {
|
|
if (err) return cb(err)
|
|
return doRename(src, dest, overwrite, isChangingCase, cb)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
function isParentRoot (dest) {
|
|
const parent = path.dirname(dest)
|
|
const parsedPath = path.parse(parent)
|
|
return parsedPath.root === parent
|
|
}
|
|
|
|
function doRename (src, dest, overwrite, isChangingCase, cb) {
|
|
if (isChangingCase) return rename(src, dest, overwrite, cb)
|
|
if (overwrite) {
|
|
return remove(dest, err => {
|
|
if (err) return cb(err)
|
|
return rename(src, dest, overwrite, cb)
|
|
})
|
|
}
|
|
pathExists(dest, (err, destExists) => {
|
|
if (err) return cb(err)
|
|
if (destExists) return cb(new Error('dest already exists.'))
|
|
return rename(src, dest, overwrite, cb)
|
|
})
|
|
}
|
|
|
|
function rename (src, dest, overwrite, cb) {
|
|
fs.rename(src, dest, err => {
|
|
if (!err) return cb()
|
|
if (err.code !== 'EXDEV') return cb(err)
|
|
return moveAcrossDevice(src, dest, overwrite, cb)
|
|
})
|
|
}
|
|
|
|
function moveAcrossDevice (src, dest, overwrite, cb) {
|
|
const opts = {
|
|
overwrite,
|
|
errorOnExist: true
|
|
}
|
|
copy(src, dest, opts, err => {
|
|
if (err) return cb(err)
|
|
return remove(src, cb)
|
|
})
|
|
}
|
|
|
|
module.exports = move
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1832:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(1017)
|
|
const mkdir = __nccwpck_require__(8605)
|
|
const pathExists = (__nccwpck_require__(3835).pathExists)
|
|
|
|
function outputFile (file, data, encoding, callback) {
|
|
if (typeof encoding === 'function') {
|
|
callback = encoding
|
|
encoding = 'utf8'
|
|
}
|
|
|
|
const dir = path.dirname(file)
|
|
pathExists(dir, (err, itDoes) => {
|
|
if (err) return callback(err)
|
|
if (itDoes) return fs.writeFile(file, data, encoding, callback)
|
|
|
|
mkdir.mkdirs(dir, err => {
|
|
if (err) return callback(err)
|
|
|
|
fs.writeFile(file, data, encoding, callback)
|
|
})
|
|
})
|
|
}
|
|
|
|
function outputFileSync (file, ...args) {
|
|
const dir = path.dirname(file)
|
|
if (fs.existsSync(dir)) {
|
|
return fs.writeFileSync(file, ...args)
|
|
}
|
|
mkdir.mkdirsSync(dir)
|
|
fs.writeFileSync(file, ...args)
|
|
}
|
|
|
|
module.exports = {
|
|
outputFile: u(outputFile),
|
|
outputFileSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3835:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
const u = (__nccwpck_require__(9046).fromPromise)
|
|
const fs = __nccwpck_require__(1176)
|
|
|
|
function pathExists (path) {
|
|
return fs.access(path).then(() => true).catch(() => false)
|
|
}
|
|
|
|
module.exports = {
|
|
pathExists: u(pathExists),
|
|
pathExistsSync: fs.existsSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7357:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const u = (__nccwpck_require__(9046).fromCallback)
|
|
const rimraf = __nccwpck_require__(8761)
|
|
|
|
function remove (path, callback) {
|
|
// Node 14.14.0+
|
|
if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback)
|
|
rimraf(path, callback)
|
|
}
|
|
|
|
function removeSync (path) {
|
|
// Node 14.14.0+
|
|
if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true })
|
|
rimraf.sync(path)
|
|
}
|
|
|
|
module.exports = {
|
|
remove: u(remove),
|
|
removeSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8761:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
const path = __nccwpck_require__(1017)
|
|
const assert = __nccwpck_require__(9491)
|
|
|
|
const isWindows = (process.platform === 'win32')
|
|
|
|
function defaults (options) {
|
|
const methods = [
|
|
'unlink',
|
|
'chmod',
|
|
'stat',
|
|
'lstat',
|
|
'rmdir',
|
|
'readdir'
|
|
]
|
|
methods.forEach(m => {
|
|
options[m] = options[m] || fs[m]
|
|
m = m + 'Sync'
|
|
options[m] = options[m] || fs[m]
|
|
})
|
|
|
|
options.maxBusyTries = options.maxBusyTries || 3
|
|
}
|
|
|
|
function rimraf (p, options, cb) {
|
|
let busyTries = 0
|
|
|
|
if (typeof options === 'function') {
|
|
cb = options
|
|
options = {}
|
|
}
|
|
|
|
assert(p, 'rimraf: missing path')
|
|
assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
|
|
assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')
|
|
assert(options, 'rimraf: invalid options argument provided')
|
|
assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')
|
|
|
|
defaults(options)
|
|
|
|
rimraf_(p, options, function CB (er) {
|
|
if (er) {
|
|
if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&
|
|
busyTries < options.maxBusyTries) {
|
|
busyTries++
|
|
const time = busyTries * 100
|
|
// try again, with the same exact callback as this one.
|
|
return setTimeout(() => rimraf_(p, options, CB), time)
|
|
}
|
|
|
|
// already gone
|
|
if (er.code === 'ENOENT') er = null
|
|
}
|
|
|
|
cb(er)
|
|
})
|
|
}
|
|
|
|
// Two possible strategies.
|
|
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
|
|
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
|
|
//
|
|
// Both result in an extra syscall when you guess wrong. However, there
|
|
// are likely far more normal files in the world than directories. This
|
|
// is based on the assumption that the average number of files per
|
|
// directory is >= 1.
|
|
//
|
|
// If anyone ever complains about this, then I guess the strategy could
|
|
// be made configurable somehow. But until then, YAGNI.
|
|
function rimraf_ (p, options, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
// sunos lets the root user unlink directories, which is... weird.
|
|
// so we have to lstat here and make sure it's not a dir.
|
|
options.lstat(p, (er, st) => {
|
|
if (er && er.code === 'ENOENT') {
|
|
return cb(null)
|
|
}
|
|
|
|
// Windows can EPERM on stat. Life is suffering.
|
|
if (er && er.code === 'EPERM' && isWindows) {
|
|
return fixWinEPERM(p, options, er, cb)
|
|
}
|
|
|
|
if (st && st.isDirectory()) {
|
|
return rmdir(p, options, er, cb)
|
|
}
|
|
|
|
options.unlink(p, er => {
|
|
if (er) {
|
|
if (er.code === 'ENOENT') {
|
|
return cb(null)
|
|
}
|
|
if (er.code === 'EPERM') {
|
|
return (isWindows)
|
|
? fixWinEPERM(p, options, er, cb)
|
|
: rmdir(p, options, er, cb)
|
|
}
|
|
if (er.code === 'EISDIR') {
|
|
return rmdir(p, options, er, cb)
|
|
}
|
|
}
|
|
return cb(er)
|
|
})
|
|
})
|
|
}
|
|
|
|
function fixWinEPERM (p, options, er, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
options.chmod(p, 0o666, er2 => {
|
|
if (er2) {
|
|
cb(er2.code === 'ENOENT' ? null : er)
|
|
} else {
|
|
options.stat(p, (er3, stats) => {
|
|
if (er3) {
|
|
cb(er3.code === 'ENOENT' ? null : er)
|
|
} else if (stats.isDirectory()) {
|
|
rmdir(p, options, er, cb)
|
|
} else {
|
|
options.unlink(p, cb)
|
|
}
|
|
})
|
|
}
|
|
})
|
|
}
|
|
|
|
function fixWinEPERMSync (p, options, er) {
|
|
let stats
|
|
|
|
assert(p)
|
|
assert(options)
|
|
|
|
try {
|
|
options.chmodSync(p, 0o666)
|
|
} catch (er2) {
|
|
if (er2.code === 'ENOENT') {
|
|
return
|
|
} else {
|
|
throw er
|
|
}
|
|
}
|
|
|
|
try {
|
|
stats = options.statSync(p)
|
|
} catch (er3) {
|
|
if (er3.code === 'ENOENT') {
|
|
return
|
|
} else {
|
|
throw er
|
|
}
|
|
}
|
|
|
|
if (stats.isDirectory()) {
|
|
rmdirSync(p, options, er)
|
|
} else {
|
|
options.unlinkSync(p)
|
|
}
|
|
}
|
|
|
|
function rmdir (p, options, originalEr, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
|
|
// if we guessed wrong, and it's not a directory, then
|
|
// raise the original error.
|
|
options.rmdir(p, er => {
|
|
if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {
|
|
rmkids(p, options, cb)
|
|
} else if (er && er.code === 'ENOTDIR') {
|
|
cb(originalEr)
|
|
} else {
|
|
cb(er)
|
|
}
|
|
})
|
|
}
|
|
|
|
function rmkids (p, options, cb) {
|
|
assert(p)
|
|
assert(options)
|
|
assert(typeof cb === 'function')
|
|
|
|
options.readdir(p, (er, files) => {
|
|
if (er) return cb(er)
|
|
|
|
let n = files.length
|
|
let errState
|
|
|
|
if (n === 0) return options.rmdir(p, cb)
|
|
|
|
files.forEach(f => {
|
|
rimraf(path.join(p, f), options, er => {
|
|
if (errState) {
|
|
return
|
|
}
|
|
if (er) return cb(errState = er)
|
|
if (--n === 0) {
|
|
options.rmdir(p, cb)
|
|
}
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
// this looks simpler, and is strictly *faster*, but will
|
|
// tie up the JavaScript thread and fail on excessively
|
|
// deep directory trees.
|
|
function rimrafSync (p, options) {
|
|
let st
|
|
|
|
options = options || {}
|
|
defaults(options)
|
|
|
|
assert(p, 'rimraf: missing path')
|
|
assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
|
|
assert(options, 'rimraf: missing options')
|
|
assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')
|
|
|
|
try {
|
|
st = options.lstatSync(p)
|
|
} catch (er) {
|
|
if (er.code === 'ENOENT') {
|
|
return
|
|
}
|
|
|
|
// Windows can EPERM on stat. Life is suffering.
|
|
if (er.code === 'EPERM' && isWindows) {
|
|
fixWinEPERMSync(p, options, er)
|
|
}
|
|
}
|
|
|
|
try {
|
|
// sunos lets the root user unlink directories, which is... weird.
|
|
if (st && st.isDirectory()) {
|
|
rmdirSync(p, options, null)
|
|
} else {
|
|
options.unlinkSync(p)
|
|
}
|
|
} catch (er) {
|
|
if (er.code === 'ENOENT') {
|
|
return
|
|
} else if (er.code === 'EPERM') {
|
|
return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
|
|
} else if (er.code !== 'EISDIR') {
|
|
throw er
|
|
}
|
|
rmdirSync(p, options, er)
|
|
}
|
|
}
|
|
|
|
function rmdirSync (p, options, originalEr) {
|
|
assert(p)
|
|
assert(options)
|
|
|
|
try {
|
|
options.rmdirSync(p)
|
|
} catch (er) {
|
|
if (er.code === 'ENOTDIR') {
|
|
throw originalEr
|
|
} else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {
|
|
rmkidsSync(p, options)
|
|
} else if (er.code !== 'ENOENT') {
|
|
throw er
|
|
}
|
|
}
|
|
}
|
|
|
|
function rmkidsSync (p, options) {
|
|
assert(p)
|
|
assert(options)
|
|
options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
|
|
|
|
if (isWindows) {
|
|
// We only end up here once we got ENOTEMPTY at least once, and
|
|
// at this point, we are guaranteed to have removed all the kids.
|
|
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
|
|
// try really hard to delete stuff on windows, because it has a
|
|
// PROFOUNDLY annoying habit of not closing handles promptly when
|
|
// files are deleted, resulting in spurious ENOTEMPTY errors.
|
|
const startTime = Date.now()
|
|
do {
|
|
try {
|
|
const ret = options.rmdirSync(p, options)
|
|
return ret
|
|
} catch {}
|
|
} while (Date.now() - startTime < 500) // give up after 500ms
|
|
} else {
|
|
const ret = options.rmdirSync(p, options)
|
|
return ret
|
|
}
|
|
}
|
|
|
|
module.exports = rimraf
|
|
rimraf.sync = rimrafSync
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3901:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(1176)
|
|
const path = __nccwpck_require__(1017)
|
|
const util = __nccwpck_require__(3837)
|
|
|
|
function getStats (src, dest, opts) {
|
|
const statFunc = opts.dereference
|
|
? (file) => fs.stat(file, { bigint: true })
|
|
: (file) => fs.lstat(file, { bigint: true })
|
|
return Promise.all([
|
|
statFunc(src),
|
|
statFunc(dest).catch(err => {
|
|
if (err.code === 'ENOENT') return null
|
|
throw err
|
|
})
|
|
]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
|
|
}
|
|
|
|
function getStatsSync (src, dest, opts) {
|
|
let destStat
|
|
const statFunc = opts.dereference
|
|
? (file) => fs.statSync(file, { bigint: true })
|
|
: (file) => fs.lstatSync(file, { bigint: true })
|
|
const srcStat = statFunc(src)
|
|
try {
|
|
destStat = statFunc(dest)
|
|
} catch (err) {
|
|
if (err.code === 'ENOENT') return { srcStat, destStat: null }
|
|
throw err
|
|
}
|
|
return { srcStat, destStat }
|
|
}
|
|
|
|
function checkPaths (src, dest, funcName, opts, cb) {
|
|
util.callbackify(getStats)(src, dest, opts, (err, stats) => {
|
|
if (err) return cb(err)
|
|
const { srcStat, destStat } = stats
|
|
|
|
if (destStat) {
|
|
if (areIdentical(srcStat, destStat)) {
|
|
const srcBaseName = path.basename(src)
|
|
const destBaseName = path.basename(dest)
|
|
if (funcName === 'move' &&
|
|
srcBaseName !== destBaseName &&
|
|
srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
|
|
return cb(null, { srcStat, destStat, isChangingCase: true })
|
|
}
|
|
return cb(new Error('Source and destination must not be the same.'))
|
|
}
|
|
if (srcStat.isDirectory() && !destStat.isDirectory()) {
|
|
return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))
|
|
}
|
|
if (!srcStat.isDirectory() && destStat.isDirectory()) {
|
|
return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`))
|
|
}
|
|
}
|
|
|
|
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
|
return cb(new Error(errMsg(src, dest, funcName)))
|
|
}
|
|
return cb(null, { srcStat, destStat })
|
|
})
|
|
}
|
|
|
|
function checkPathsSync (src, dest, funcName, opts) {
|
|
const { srcStat, destStat } = getStatsSync(src, dest, opts)
|
|
|
|
if (destStat) {
|
|
if (areIdentical(srcStat, destStat)) {
|
|
const srcBaseName = path.basename(src)
|
|
const destBaseName = path.basename(dest)
|
|
if (funcName === 'move' &&
|
|
srcBaseName !== destBaseName &&
|
|
srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
|
|
return { srcStat, destStat, isChangingCase: true }
|
|
}
|
|
throw new Error('Source and destination must not be the same.')
|
|
}
|
|
if (srcStat.isDirectory() && !destStat.isDirectory()) {
|
|
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
|
|
}
|
|
if (!srcStat.isDirectory() && destStat.isDirectory()) {
|
|
throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)
|
|
}
|
|
}
|
|
|
|
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
|
throw new Error(errMsg(src, dest, funcName))
|
|
}
|
|
return { srcStat, destStat }
|
|
}
|
|
|
|
// recursively check if dest parent is a subdirectory of src.
|
|
// It works for all file types including symlinks since it
|
|
// checks the src and dest inodes. It starts from the deepest
|
|
// parent and stops once it reaches the src parent or the root path.
|
|
function checkParentPaths (src, srcStat, dest, funcName, cb) {
|
|
const srcParent = path.resolve(path.dirname(src))
|
|
const destParent = path.resolve(path.dirname(dest))
|
|
if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()
|
|
fs.stat(destParent, { bigint: true }, (err, destStat) => {
|
|
if (err) {
|
|
if (err.code === 'ENOENT') return cb()
|
|
return cb(err)
|
|
}
|
|
if (areIdentical(srcStat, destStat)) {
|
|
return cb(new Error(errMsg(src, dest, funcName)))
|
|
}
|
|
return checkParentPaths(src, srcStat, destParent, funcName, cb)
|
|
})
|
|
}
|
|
|
|
function checkParentPathsSync (src, srcStat, dest, funcName) {
|
|
const srcParent = path.resolve(path.dirname(src))
|
|
const destParent = path.resolve(path.dirname(dest))
|
|
if (destParent === srcParent || destParent === path.parse(destParent).root) return
|
|
let destStat
|
|
try {
|
|
destStat = fs.statSync(destParent, { bigint: true })
|
|
} catch (err) {
|
|
if (err.code === 'ENOENT') return
|
|
throw err
|
|
}
|
|
if (areIdentical(srcStat, destStat)) {
|
|
throw new Error(errMsg(src, dest, funcName))
|
|
}
|
|
return checkParentPathsSync(src, srcStat, destParent, funcName)
|
|
}
|
|
|
|
function areIdentical (srcStat, destStat) {
|
|
return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev
|
|
}
|
|
|
|
// return true if dest is a subdir of src, otherwise false.
|
|
// It only checks the path strings.
|
|
function isSrcSubdir (src, dest) {
|
|
const srcArr = path.resolve(src).split(path.sep).filter(i => i)
|
|
const destArr = path.resolve(dest).split(path.sep).filter(i => i)
|
|
return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true)
|
|
}
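// Illustrative sketch (comment only): isSrcSubdir compares resolved path segments,
// so it detects nesting without touching the filesystem.
//
//   isSrcSubdir('/a/b', '/a/b/c')   // true  -> copy/move into itself is rejected
//   isSrcSubdir('/a/b', '/a/bc')    // false -> 'bc' is a sibling, not a child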
|
|
|
|
function errMsg (src, dest, funcName) {
|
|
return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`
|
|
}
|
|
|
|
module.exports = {
|
|
checkPaths,
|
|
checkPathsSync,
|
|
checkParentPaths,
|
|
checkParentPathsSync,
|
|
isSrcSubdir,
|
|
areIdentical
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2548:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
const fs = __nccwpck_require__(7758)
|
|
|
|
function utimesMillis (path, atime, mtime, callback) {
|
|
// if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
|
|
fs.open(path, 'r+', (err, fd) => {
|
|
if (err) return callback(err)
|
|
fs.futimes(fd, atime, mtime, futimesErr => {
|
|
fs.close(fd, closeErr => {
|
|
if (callback) callback(futimesErr || closeErr)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
function utimesMillisSync (path, atime, mtime) {
|
|
const fd = fs.openSync(path, 'r+')
|
|
fs.futimesSync(fd, atime, mtime)
|
|
return fs.closeSync(fd)
|
|
}
|
|
|
|
module.exports = {
|
|
utimesMillis,
|
|
utimesMillisSync
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7356:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = clone
|
|
|
|
var getPrototypeOf = Object.getPrototypeOf || function (obj) {
|
|
return obj.__proto__
|
|
}
|
|
|
|
function clone (obj) {
|
|
if (obj === null || typeof obj !== 'object')
|
|
return obj
|
|
|
|
if (obj instanceof Object)
|
|
var copy = { __proto__: getPrototypeOf(obj) }
|
|
else
|
|
var copy = Object.create(null)
|
|
|
|
Object.getOwnPropertyNames(obj).forEach(function (key) {
|
|
Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
|
|
})
|
|
|
|
return copy
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7758:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var fs = __nccwpck_require__(7147)
|
|
var polyfills = __nccwpck_require__(263)
|
|
var legacy = __nccwpck_require__(3086)
|
|
var clone = __nccwpck_require__(7356)
|
|
|
|
var util = __nccwpck_require__(3837)
|
|
|
|
/* istanbul ignore next - node 0.x polyfill */
|
|
var gracefulQueue
|
|
var previousSymbol
|
|
|
|
/* istanbul ignore else - node 0.x polyfill */
|
|
if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
|
|
gracefulQueue = Symbol.for('graceful-fs.queue')
|
|
// This is used in testing by future versions
|
|
previousSymbol = Symbol.for('graceful-fs.previous')
|
|
} else {
|
|
gracefulQueue = '___graceful-fs.queue'
|
|
previousSymbol = '___graceful-fs.previous'
|
|
}
|
|
|
|
function noop () {}
|
|
|
|
function publishQueue(context, queue) {
|
|
Object.defineProperty(context, gracefulQueue, {
|
|
get: function() {
|
|
return queue
|
|
}
|
|
})
|
|
}
|
|
|
|
var debug = noop
|
|
if (util.debuglog)
|
|
debug = util.debuglog('gfs4')
|
|
else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
|
|
debug = function() {
|
|
var m = util.format.apply(util, arguments)
|
|
m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
|
|
console.error(m)
|
|
}
|
|
|
|
// One-time initialization
|
|
if (!fs[gracefulQueue]) {
|
|
// This queue can be shared by multiple loaded instances
|
|
var queue = global[gracefulQueue] || []
|
|
publishQueue(fs, queue)
|
|
|
|
// Patch fs.close/closeSync to shared queue version, because we need
|
|
// to retry() whenever a close happens *anywhere* in the program.
|
|
// This is essential when multiple graceful-fs instances are
|
|
// in play at the same time.
|
|
fs.close = (function (fs$close) {
|
|
function close (fd, cb) {
|
|
return fs$close.call(fs, fd, function (err) {
|
|
// This function uses the graceful-fs shared queue
|
|
if (!err) {
|
|
resetQueue()
|
|
}
|
|
|
|
if (typeof cb === 'function')
|
|
cb.apply(this, arguments)
|
|
})
|
|
}
|
|
|
|
Object.defineProperty(close, previousSymbol, {
|
|
value: fs$close
|
|
})
|
|
return close
|
|
})(fs.close)
|
|
|
|
fs.closeSync = (function (fs$closeSync) {
|
|
function closeSync (fd) {
|
|
// This function uses the graceful-fs shared queue
|
|
fs$closeSync.apply(fs, arguments)
|
|
resetQueue()
|
|
}
|
|
|
|
Object.defineProperty(closeSync, previousSymbol, {
|
|
value: fs$closeSync
|
|
})
|
|
return closeSync
|
|
})(fs.closeSync)
|
|
|
|
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
|
|
process.on('exit', function() {
|
|
debug(fs[gracefulQueue])
|
|
__nccwpck_require__(9491).equal(fs[gracefulQueue].length, 0)
|
|
})
|
|
}
|
|
}
|
|
|
|
if (!global[gracefulQueue]) {
|
|
publishQueue(global, fs[gracefulQueue]);
|
|
}
|
|
|
|
module.exports = patch(clone(fs))
|
|
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
|
|
module.exports = patch(fs)
|
|
fs.__patched = true;
|
|
}
|
|
|
|
function patch (fs) {
|
|
// Everything that references the open() function needs to be in here
|
|
polyfills(fs)
|
|
fs.gracefulify = patch
|
|
|
|
fs.createReadStream = createReadStream
|
|
fs.createWriteStream = createWriteStream
|
|
var fs$readFile = fs.readFile
|
|
fs.readFile = readFile
|
|
function readFile (path, options, cb) {
|
|
if (typeof options === 'function')
|
|
cb = options, options = null
|
|
|
|
return go$readFile(path, options, cb)
|
|
|
|
function go$readFile (path, options, cb, startTime) {
|
|
return fs$readFile(path, options, function (err) {
|
|
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
|
enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
|
|
else {
|
|
if (typeof cb === 'function')
|
|
cb.apply(this, arguments)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
var fs$writeFile = fs.writeFile
|
|
fs.writeFile = writeFile
|
|
function writeFile (path, data, options, cb) {
|
|
if (typeof options === 'function')
|
|
cb = options, options = null
|
|
|
|
return go$writeFile(path, data, options, cb)
|
|
|
|
function go$writeFile (path, data, options, cb, startTime) {
|
|
return fs$writeFile(path, data, options, function (err) {
|
|
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
|
enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
|
|
else {
|
|
if (typeof cb === 'function')
|
|
cb.apply(this, arguments)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
var fs$appendFile = fs.appendFile
|
|
if (fs$appendFile)
|
|
fs.appendFile = appendFile
|
|
function appendFile (path, data, options, cb) {
|
|
if (typeof options === 'function')
|
|
cb = options, options = null
|
|
|
|
return go$appendFile(path, data, options, cb)
|
|
|
|
function go$appendFile (path, data, options, cb, startTime) {
|
|
return fs$appendFile(path, data, options, function (err) {
|
|
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
|
enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
|
|
else {
|
|
if (typeof cb === 'function')
|
|
cb.apply(this, arguments)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
var fs$copyFile = fs.copyFile
|
|
if (fs$copyFile)
|
|
fs.copyFile = copyFile
|
|
function copyFile (src, dest, flags, cb) {
|
|
if (typeof flags === 'function') {
|
|
cb = flags
|
|
flags = 0
|
|
}
|
|
return go$copyFile(src, dest, flags, cb)
|
|
|
|
function go$copyFile (src, dest, flags, cb, startTime) {
|
|
return fs$copyFile(src, dest, flags, function (err) {
|
|
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
|
enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
|
|
else {
|
|
if (typeof cb === 'function')
|
|
cb.apply(this, arguments)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
var fs$readdir = fs.readdir
|
|
fs.readdir = readdir
|
|
var noReaddirOptionVersions = /^v[0-5]\./
|
|
function readdir (path, options, cb) {
|
|
if (typeof options === 'function')
|
|
cb = options, options = null
|
|
|
|
var go$readdir = noReaddirOptionVersions.test(process.version)
|
|
? function go$readdir (path, options, cb, startTime) {
|
|
return fs$readdir(path, fs$readdirCallback(
|
|
path, options, cb, startTime
|
|
))
|
|
}
|
|
: function go$readdir (path, options, cb, startTime) {
|
|
return fs$readdir(path, options, fs$readdirCallback(
|
|
path, options, cb, startTime
|
|
))
|
|
}
|
|
|
|
return go$readdir(path, options, cb)
|
|
|
|
function fs$readdirCallback (path, options, cb, startTime) {
|
|
return function (err, files) {
|
|
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
|
enqueue([
|
|
go$readdir,
|
|
[path, options, cb],
|
|
err,
|
|
startTime || Date.now(),
|
|
Date.now()
|
|
])
|
|
else {
|
|
if (files && files.sort)
|
|
files.sort()
|
|
|
|
if (typeof cb === 'function')
|
|
cb.call(this, err, files)
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if (process.version.substr(0, 4) === 'v0.8') {
|
|
var legStreams = legacy(fs)
|
|
ReadStream = legStreams.ReadStream
|
|
WriteStream = legStreams.WriteStream
|
|
}
|
|
|
|
var fs$ReadStream = fs.ReadStream
|
|
if (fs$ReadStream) {
|
|
ReadStream.prototype = Object.create(fs$ReadStream.prototype)
|
|
ReadStream.prototype.open = ReadStream$open
|
|
}
|
|
|
|
var fs$WriteStream = fs.WriteStream
|
|
if (fs$WriteStream) {
|
|
WriteStream.prototype = Object.create(fs$WriteStream.prototype)
|
|
WriteStream.prototype.open = WriteStream$open
|
|
}
|
|
|
|
Object.defineProperty(fs, 'ReadStream', {
|
|
get: function () {
|
|
return ReadStream
|
|
},
|
|
set: function (val) {
|
|
ReadStream = val
|
|
},
|
|
enumerable: true,
|
|
configurable: true
|
|
})
|
|
Object.defineProperty(fs, 'WriteStream', {
|
|
get: function () {
|
|
return WriteStream
|
|
},
|
|
set: function (val) {
|
|
WriteStream = val
|
|
},
|
|
enumerable: true,
|
|
configurable: true
|
|
})
|
|
|
|
// legacy names
|
|
var FileReadStream = ReadStream
|
|
Object.defineProperty(fs, 'FileReadStream', {
|
|
get: function () {
|
|
return FileReadStream
|
|
},
|
|
set: function (val) {
|
|
FileReadStream = val
|
|
},
|
|
enumerable: true,
|
|
configurable: true
|
|
})
|
|
var FileWriteStream = WriteStream
|
|
Object.defineProperty(fs, 'FileWriteStream', {
|
|
get: function () {
|
|
return FileWriteStream
|
|
},
|
|
set: function (val) {
|
|
FileWriteStream = val
|
|
},
|
|
enumerable: true,
|
|
configurable: true
|
|
})
|
|
|
|
function ReadStream (path, options) {
|
|
if (this instanceof ReadStream)
|
|
return fs$ReadStream.apply(this, arguments), this
|
|
else
|
|
return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
|
|
}
|
|
|
|
function ReadStream$open () {
|
|
var that = this
|
|
open(that.path, that.flags, that.mode, function (err, fd) {
|
|
if (err) {
|
|
if (that.autoClose)
|
|
that.destroy()
|
|
|
|
that.emit('error', err)
|
|
} else {
|
|
that.fd = fd
|
|
that.emit('open', fd)
|
|
that.read()
|
|
}
|
|
})
|
|
}
|
|
|
|
function WriteStream (path, options) {
|
|
if (this instanceof WriteStream)
|
|
return fs$WriteStream.apply(this, arguments), this
|
|
else
|
|
return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
|
|
}
|
|
|
|
function WriteStream$open () {
|
|
var that = this
|
|
open(that.path, that.flags, that.mode, function (err, fd) {
|
|
if (err) {
|
|
that.destroy()
|
|
that.emit('error', err)
|
|
} else {
|
|
that.fd = fd
|
|
that.emit('open', fd)
|
|
}
|
|
})
|
|
}
|
|
|
|
function createReadStream (path, options) {
|
|
return new fs.ReadStream(path, options)
|
|
}
|
|
|
|
function createWriteStream (path, options) {
|
|
return new fs.WriteStream(path, options)
|
|
}
|
|
|
|
var fs$open = fs.open
|
|
fs.open = open
|
|
function open (path, flags, mode, cb) {
|
|
if (typeof mode === 'function')
|
|
cb = mode, mode = null
|
|
|
|
return go$open(path, flags, mode, cb)
|
|
|
|
function go$open (path, flags, mode, cb, startTime) {
|
|
return fs$open(path, flags, mode, function (err, fd) {
|
|
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
|
|
enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
|
|
else {
|
|
if (typeof cb === 'function')
|
|
cb.apply(this, arguments)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
return fs
|
|
}
|
|
|
|
function enqueue (elem) {
  debug('ENQUEUE', elem[0].name, elem[1])
  fs[gracefulQueue].push(elem)
  retry()
}

// keep track of the timeout between retry() calls
var retryTimer

// reset the startTime and lastTime to now
// this resets the start of the 60 second overall timeout as well as the
// delay between attempts so that we'll retry these jobs sooner
function resetQueue () {
  var now = Date.now()
  for (var i = 0; i < fs[gracefulQueue].length; ++i) {
    // entries that are only a length of 2 are from an older version, don't
    // bother modifying those since they'll be retried anyway.
    if (fs[gracefulQueue][i].length > 2) {
      fs[gracefulQueue][i][3] = now // startTime
      fs[gracefulQueue][i][4] = now // lastTime
    }
  }
  // call retry to make sure we're actively processing the queue
  retry()
}

function retry () {
  // clear the timer and remove it to help prevent unintended concurrency
  clearTimeout(retryTimer)
  retryTimer = undefined

  if (fs[gracefulQueue].length === 0)
    return

  var elem = fs[gracefulQueue].shift()
  var fn = elem[0]
  var args = elem[1]
  // these items may be unset if they were added by an older graceful-fs
  var err = elem[2]
  var startTime = elem[3]
  var lastTime = elem[4]

  // if we don't have a startTime we have no way of knowing if we've waited
  // long enough, so go ahead and retry this item now
  if (startTime === undefined) {
    debug('RETRY', fn.name, args)
    fn.apply(null, args)
  } else if (Date.now() - startTime >= 60000) {
    // it's been more than 60 seconds total, bail now
    debug('TIMEOUT', fn.name, args)
    var cb = args.pop()
    if (typeof cb === 'function')
      cb.call(null, err)
  } else {
    // the amount of time between the last attempt and right now
    var sinceAttempt = Date.now() - lastTime
    // the amount of time between when we first tried, and when we last tried
    // rounded up to at least 1
    var sinceStart = Math.max(lastTime - startTime, 1)
    // backoff. wait longer than the total time we've been retrying, but only
    // up to a maximum of 100ms
    var desiredDelay = Math.min(sinceStart * 1.2, 100)
    // it's been long enough since the last retry, do it again
    if (sinceAttempt >= desiredDelay) {
      debug('RETRY', fn.name, args)
      fn.apply(null, args.concat([startTime]))
    } else {
      // if we can't do this job yet, push it to the end of the queue
      // and let the next iteration check again
      fs[gracefulQueue].push(elem)
    }
  }

  // schedule our next run if one isn't already scheduled
  if (retryTimer === undefined) {
    retryTimer = setTimeout(retry, 0)
  }
}
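
// A rough worked example of the backoff above (numbers are illustrative):
// suppose a queued job first failed 50ms ago (startTime) and was last
// attempted 10ms ago (lastTime). Then sinceStart = max(40, 1) = 40 and
// desiredDelay = min(40 * 1.2, 100) = 48ms; since only 10ms have passed the
// job is pushed back and picked up on a later tick. Once sinceStart exceeds
// ~83ms the delay saturates at the 100ms cap, and after 60s total the job
// gives up and its callback receives the original EMFILE/ENFILE error.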
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3086:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var Stream = (__nccwpck_require__(2781).Stream)
|
|
|
|
module.exports = legacy
|
|
|
|
function legacy (fs) {
|
|
return {
|
|
ReadStream: ReadStream,
|
|
WriteStream: WriteStream
|
|
}
|
|
|
|
function ReadStream (path, options) {
|
|
if (!(this instanceof ReadStream)) return new ReadStream(path, options);
|
|
|
|
Stream.call(this);
|
|
|
|
var self = this;
|
|
|
|
this.path = path;
|
|
this.fd = null;
|
|
this.readable = true;
|
|
this.paused = false;
|
|
|
|
this.flags = 'r';
|
|
this.mode = 438; /*=0666*/
|
|
this.bufferSize = 64 * 1024;
|
|
|
|
options = options || {};
|
|
|
|
// Mixin options into this
|
|
var keys = Object.keys(options);
|
|
for (var index = 0, length = keys.length; index < length; index++) {
|
|
var key = keys[index];
|
|
this[key] = options[key];
|
|
}
|
|
|
|
if (this.encoding) this.setEncoding(this.encoding);
|
|
|
|
if (this.start !== undefined) {
|
|
if ('number' !== typeof this.start) {
|
|
throw TypeError('start must be a Number');
|
|
}
|
|
if (this.end === undefined) {
|
|
this.end = Infinity;
|
|
} else if ('number' !== typeof this.end) {
|
|
throw TypeError('end must be a Number');
|
|
}
|
|
|
|
if (this.start > this.end) {
|
|
throw new Error('start must be <= end');
|
|
}
|
|
|
|
this.pos = this.start;
|
|
}
|
|
|
|
if (this.fd !== null) {
|
|
process.nextTick(function() {
|
|
self._read();
|
|
});
|
|
return;
|
|
}
|
|
|
|
fs.open(this.path, this.flags, this.mode, function (err, fd) {
|
|
if (err) {
|
|
self.emit('error', err);
|
|
self.readable = false;
|
|
return;
|
|
}
|
|
|
|
self.fd = fd;
|
|
self.emit('open', fd);
|
|
self._read();
|
|
})
|
|
}
|
|
|
|
function WriteStream (path, options) {
|
|
if (!(this instanceof WriteStream)) return new WriteStream(path, options);
|
|
|
|
Stream.call(this);
|
|
|
|
this.path = path;
|
|
this.fd = null;
|
|
this.writable = true;
|
|
|
|
this.flags = 'w';
|
|
this.encoding = 'binary';
|
|
this.mode = 438; /*=0666*/
|
|
this.bytesWritten = 0;
|
|
|
|
options = options || {};
|
|
|
|
// Mixin options into this
|
|
var keys = Object.keys(options);
|
|
for (var index = 0, length = keys.length; index < length; index++) {
|
|
var key = keys[index];
|
|
this[key] = options[key];
|
|
}
|
|
|
|
if (this.start !== undefined) {
|
|
if ('number' !== typeof this.start) {
|
|
throw TypeError('start must be a Number');
|
|
}
|
|
if (this.start < 0) {
|
|
throw new Error('start must be >= zero');
|
|
}
|
|
|
|
this.pos = this.start;
|
|
}
|
|
|
|
this.busy = false;
|
|
this._queue = [];
|
|
|
|
if (this.fd === null) {
|
|
this._open = fs.open;
|
|
this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
|
|
this.flush();
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 263:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var constants = __nccwpck_require__(2057)
|
|
|
|
var origCwd = process.cwd
|
|
var cwd = null
|
|
|
|
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
|
|
|
|
process.cwd = function() {
|
|
if (!cwd)
|
|
cwd = origCwd.call(process)
|
|
return cwd
|
|
}
|
|
try {
|
|
process.cwd()
|
|
} catch (er) {}
|
|
|
|
// This check is needed until node.js 12 is required
|
|
if (typeof process.chdir === 'function') {
|
|
var chdir = process.chdir
|
|
process.chdir = function (d) {
|
|
cwd = null
|
|
chdir.call(process, d)
|
|
}
|
|
if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
|
|
}
|
|
|
|
module.exports = patch
|
|
|
|
function patch (fs) {
|
|
// (re-)implement some things that are known busted or missing.
|
|
|
|
// lchmod, broken prior to 0.6.2
|
|
// back-port the fix here.
|
|
if (constants.hasOwnProperty('O_SYMLINK') &&
|
|
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
|
patchLchmod(fs)
|
|
}
|
|
|
|
// lutimes implementation, or no-op
|
|
if (!fs.lutimes) {
|
|
patchLutimes(fs)
|
|
}
|
|
|
|
// https://github.com/isaacs/node-graceful-fs/issues/4
|
|
// Chown should not fail on einval or eperm if non-root.
|
|
// It should not fail on enosys ever, as this just indicates
|
|
// that a fs doesn't support the intended operation.
|
|
|
|
fs.chown = chownFix(fs.chown)
|
|
fs.fchown = chownFix(fs.fchown)
|
|
fs.lchown = chownFix(fs.lchown)
|
|
|
|
fs.chmod = chmodFix(fs.chmod)
|
|
fs.fchmod = chmodFix(fs.fchmod)
|
|
fs.lchmod = chmodFix(fs.lchmod)
|
|
|
|
fs.chownSync = chownFixSync(fs.chownSync)
|
|
fs.fchownSync = chownFixSync(fs.fchownSync)
|
|
fs.lchownSync = chownFixSync(fs.lchownSync)
|
|
|
|
fs.chmodSync = chmodFixSync(fs.chmodSync)
|
|
fs.fchmodSync = chmodFixSync(fs.fchmodSync)
|
|
fs.lchmodSync = chmodFixSync(fs.lchmodSync)
|
|
|
|
fs.stat = statFix(fs.stat)
|
|
fs.fstat = statFix(fs.fstat)
|
|
fs.lstat = statFix(fs.lstat)
|
|
|
|
fs.statSync = statFixSync(fs.statSync)
|
|
fs.fstatSync = statFixSync(fs.fstatSync)
|
|
fs.lstatSync = statFixSync(fs.lstatSync)
|
|
|
|
// if lchmod/lchown do not exist, then make them no-ops
|
|
if (fs.chmod && !fs.lchmod) {
|
|
fs.lchmod = function (path, mode, cb) {
|
|
if (cb) process.nextTick(cb)
|
|
}
|
|
fs.lchmodSync = function () {}
|
|
}
|
|
if (fs.chown && !fs.lchown) {
|
|
fs.lchown = function (path, uid, gid, cb) {
|
|
if (cb) process.nextTick(cb)
|
|
}
|
|
fs.lchownSync = function () {}
|
|
}
|
|
|
|
// on Windows, A/V software can lock the directory, causing this
|
|
// to fail with an EACCES or EPERM if the directory contains newly
|
|
// created files. Try again on failure, for up to 60 seconds.
|
|
|
|
// Set the timeout this long because some Windows Anti-Virus, such as Parity
|
|
// bit9, may lock files for up to a minute, causing npm package install
|
|
// failures. Also, take care to yield the scheduler. Windows scheduling gives
|
|
// CPU to a busy looping process, which can cause the program causing the lock
|
|
// contention to be starved of CPU by node, so the contention doesn't resolve.
|
|
if (platform === "win32") {
|
|
fs.rename = typeof fs.rename !== 'function' ? fs.rename
|
|
: (function (fs$rename) {
|
|
function rename (from, to, cb) {
|
|
var start = Date.now()
|
|
var backoff = 0;
|
|
fs$rename(from, to, function CB (er) {
|
|
if (er
|
|
&& (er.code === "EACCES" || er.code === "EPERM")
|
|
&& Date.now() - start < 60000) {
|
|
setTimeout(function() {
|
|
fs.stat(to, function (stater, st) {
|
|
if (stater && stater.code === "ENOENT")
|
|
fs$rename(from, to, CB);
|
|
else
|
|
cb(er)
|
|
})
|
|
}, backoff)
|
|
if (backoff < 100)
|
|
backoff += 10;
|
|
return;
|
|
}
|
|
if (cb) cb(er)
|
|
})
|
|
}
|
|
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
|
|
return rename
|
|
})(fs.rename)
|
|
}
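
  // In short, the patched rename above retries a rename that fails with
  // EACCES/EPERM while fs.stat(to) still reports ENOENT, growing the delay by
  // 10ms per attempt (capped at 100ms) for at most 60 seconds before handing
  // the original error to the callback. A hypothetical call such as
  //   fs.rename('app.tmp', 'app.jar', function (err) { /* ... */ })
  // therefore tends to survive a virus scanner briefly locking the target.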
|
|
|
|
// if read() returns EAGAIN, then just try it again.
|
|
fs.read = typeof fs.read !== 'function' ? fs.read
|
|
: (function (fs$read) {
|
|
function read (fd, buffer, offset, length, position, callback_) {
|
|
var callback
|
|
if (callback_ && typeof callback_ === 'function') {
|
|
var eagCounter = 0
|
|
callback = function (er, _, __) {
|
|
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
|
|
eagCounter ++
|
|
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
|
}
|
|
callback_.apply(this, arguments)
|
|
}
|
|
}
|
|
return fs$read.call(fs, fd, buffer, offset, length, position, callback)
|
|
}
|
|
|
|
// This ensures `util.promisify` works as it does for native `fs.read`.
|
|
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
|
|
return read
|
|
})(fs.read)
|
|
|
|
fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
|
|
: (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
|
|
var eagCounter = 0
|
|
while (true) {
|
|
try {
|
|
return fs$readSync.call(fs, fd, buffer, offset, length, position)
|
|
} catch (er) {
|
|
if (er.code === 'EAGAIN' && eagCounter < 10) {
|
|
eagCounter ++
|
|
continue
|
|
}
|
|
throw er
|
|
}
|
|
}
|
|
}})(fs.readSync)
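
  // Both wrappers above retry a read that fails with EAGAIN up to 10 times
  // (eagCounter < 10) before letting the error through, which is generally
  // enough for transient non-blocking-descriptor hiccups.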
|
|
|
|
function patchLchmod (fs) {
|
|
fs.lchmod = function (path, mode, callback) {
|
|
fs.open( path
|
|
, constants.O_WRONLY | constants.O_SYMLINK
|
|
, mode
|
|
, function (err, fd) {
|
|
if (err) {
|
|
if (callback) callback(err)
|
|
return
|
|
}
|
|
// prefer to return the chmod error, if one occurs,
|
|
// but still try to close, and report closing errors if they occur.
|
|
fs.fchmod(fd, mode, function (err) {
|
|
fs.close(fd, function(err2) {
|
|
if (callback) callback(err || err2)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
fs.lchmodSync = function (path, mode) {
|
|
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
|
|
|
|
// prefer to return the chmod error, if one occurs,
|
|
// but still try to close, and report closing errors if they occur.
|
|
var threw = true
|
|
var ret
|
|
try {
|
|
ret = fs.fchmodSync(fd, mode)
|
|
threw = false
|
|
} finally {
|
|
if (threw) {
|
|
try {
|
|
fs.closeSync(fd)
|
|
} catch (er) {}
|
|
} else {
|
|
fs.closeSync(fd)
|
|
}
|
|
}
|
|
return ret
|
|
}
|
|
}
|
|
|
|
function patchLutimes (fs) {
|
|
if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
|
|
fs.lutimes = function (path, at, mt, cb) {
|
|
fs.open(path, constants.O_SYMLINK, function (er, fd) {
|
|
if (er) {
|
|
if (cb) cb(er)
|
|
return
|
|
}
|
|
fs.futimes(fd, at, mt, function (er) {
|
|
fs.close(fd, function (er2) {
|
|
if (cb) cb(er || er2)
|
|
})
|
|
})
|
|
})
|
|
}
|
|
|
|
fs.lutimesSync = function (path, at, mt) {
|
|
var fd = fs.openSync(path, constants.O_SYMLINK)
|
|
var ret
|
|
var threw = true
|
|
try {
|
|
ret = fs.futimesSync(fd, at, mt)
|
|
threw = false
|
|
} finally {
|
|
if (threw) {
|
|
try {
|
|
fs.closeSync(fd)
|
|
} catch (er) {}
|
|
} else {
|
|
fs.closeSync(fd)
|
|
}
|
|
}
|
|
return ret
|
|
}
|
|
|
|
} else if (fs.futimes) {
|
|
fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
|
|
fs.lutimesSync = function () {}
|
|
}
|
|
}
|
|
|
|
function chmodFix (orig) {
|
|
if (!orig) return orig
|
|
return function (target, mode, cb) {
|
|
return orig.call(fs, target, mode, function (er) {
|
|
if (chownErOk(er)) er = null
|
|
if (cb) cb.apply(this, arguments)
|
|
})
|
|
}
|
|
}
|
|
|
|
function chmodFixSync (orig) {
|
|
if (!orig) return orig
|
|
return function (target, mode) {
|
|
try {
|
|
return orig.call(fs, target, mode)
|
|
} catch (er) {
|
|
if (!chownErOk(er)) throw er
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
function chownFix (orig) {
|
|
if (!orig) return orig
|
|
return function (target, uid, gid, cb) {
|
|
return orig.call(fs, target, uid, gid, function (er) {
|
|
if (chownErOk(er)) er = null
|
|
if (cb) cb.apply(this, arguments)
|
|
})
|
|
}
|
|
}
|
|
|
|
function chownFixSync (orig) {
|
|
if (!orig) return orig
|
|
return function (target, uid, gid) {
|
|
try {
|
|
return orig.call(fs, target, uid, gid)
|
|
} catch (er) {
|
|
if (!chownErOk(er)) throw er
|
|
}
|
|
}
|
|
}
|
|
|
|
function statFix (orig) {
|
|
if (!orig) return orig
|
|
// Older versions of Node erroneously returned signed integers for
|
|
// uid + gid.
|
|
return function (target, options, cb) {
|
|
if (typeof options === 'function') {
|
|
cb = options
|
|
options = null
|
|
}
|
|
function callback (er, stats) {
|
|
if (stats) {
|
|
if (stats.uid < 0) stats.uid += 0x100000000
|
|
if (stats.gid < 0) stats.gid += 0x100000000
|
|
}
|
|
if (cb) cb.apply(this, arguments)
|
|
}
|
|
return options ? orig.call(fs, target, options, callback)
|
|
: orig.call(fs, target, callback)
|
|
}
|
|
}
|
|
|
|
function statFixSync (orig) {
|
|
if (!orig) return orig
|
|
// Older versions of Node erroneously returned signed integers for
|
|
// uid + gid.
|
|
return function (target, options) {
|
|
var stats = options ? orig.call(fs, target, options)
|
|
: orig.call(fs, target)
|
|
if (stats) {
|
|
if (stats.uid < 0) stats.uid += 0x100000000
|
|
if (stats.gid < 0) stats.gid += 0x100000000
|
|
}
|
|
return stats;
|
|
}
|
|
}
|
|
|
|
// ENOSYS means that the fs doesn't support the op. Just ignore
|
|
// that, because it doesn't matter.
|
|
//
|
|
// if there's no getuid, or if getuid() is something other
|
|
// than 0, and the error is EINVAL or EPERM, then just ignore
|
|
// it.
|
|
//
|
|
// This specific case is a silent failure in cp, install, tar,
|
|
// and most other unix tools that manage permissions.
|
|
//
|
|
// When running as root, or if other types of errors are
|
|
// encountered, then it's strict.
|
|
function chownErOk (er) {
|
|
if (!er)
|
|
return true
|
|
|
|
if (er.code === "ENOSYS")
|
|
return true
|
|
|
|
var nonroot = !process.getuid || process.getuid() !== 0
|
|
if (nonroot) {
|
|
if (er.code === "EINVAL" || er.code === "EPERM")
|
|
return true
|
|
}
|
|
|
|
return false
|
|
}
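
  // Rough decision table for chownErOk above:
  //   no error                                      -> treated as success
  //   ENOSYS                                        -> ignored (fs lacks the op)
  //   EINVAL or EPERM while not running as root     -> ignored (matches cp/tar)
  //   EINVAL/EPERM as root, or any other error code -> propagated to the caller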
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6160:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
let _fs
|
|
try {
|
|
_fs = __nccwpck_require__(7758)
|
|
} catch (_) {
|
|
_fs = __nccwpck_require__(7147)
|
|
}
|
|
const universalify = __nccwpck_require__(9046)
|
|
const { stringify, stripBom } = __nccwpck_require__(5902)
|
|
|
|
async function _readFile (file, options = {}) {
|
|
if (typeof options === 'string') {
|
|
options = { encoding: options }
|
|
}
|
|
|
|
const fs = options.fs || _fs
|
|
|
|
const shouldThrow = 'throws' in options ? options.throws : true
|
|
|
|
let data = await universalify.fromCallback(fs.readFile)(file, options)
|
|
|
|
data = stripBom(data)
|
|
|
|
let obj
|
|
try {
|
|
obj = JSON.parse(data, options ? options.reviver : null)
|
|
} catch (err) {
|
|
if (shouldThrow) {
|
|
err.message = `${file}: ${err.message}`
|
|
throw err
|
|
} else {
|
|
return null
|
|
}
|
|
}
|
|
|
|
return obj
|
|
}
|
|
|
|
const readFile = universalify.fromPromise(_readFile)
|
|
|
|
function readFileSync (file, options = {}) {
|
|
if (typeof options === 'string') {
|
|
options = { encoding: options }
|
|
}
|
|
|
|
const fs = options.fs || _fs
|
|
|
|
const shouldThrow = 'throws' in options ? options.throws : true
|
|
|
|
try {
|
|
let content = fs.readFileSync(file, options)
|
|
content = stripBom(content)
|
|
return JSON.parse(content, options.reviver)
|
|
} catch (err) {
|
|
if (shouldThrow) {
|
|
err.message = `${file}: ${err.message}`
|
|
throw err
|
|
} else {
|
|
return null
|
|
}
|
|
}
|
|
}
|
|
|
|
async function _writeFile (file, obj, options = {}) {
|
|
const fs = options.fs || _fs
|
|
|
|
const str = stringify(obj, options)
|
|
|
|
await universalify.fromCallback(fs.writeFile)(file, str, options)
|
|
}
|
|
|
|
const writeFile = universalify.fromPromise(_writeFile)
|
|
|
|
function writeFileSync (file, obj, options = {}) {
|
|
const fs = options.fs || _fs
|
|
|
|
const str = stringify(obj, options)
|
|
// not sure if fs.writeFileSync returns anything, but just in case
|
|
return fs.writeFileSync(file, str, options)
|
|
}
|
|
|
|
const jsonfile = {
|
|
readFile,
|
|
readFileSync,
|
|
writeFile,
|
|
writeFileSync
|
|
}
|
|
|
|
module.exports = jsonfile
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5902:
|
|
/***/ ((module) => {
|
|
|
|
function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) {
  const EOF = finalEOL ? EOL : ''
  const str = JSON.stringify(obj, replacer, spaces)

  return str.replace(/\n/g, EOL) + EOF
}

function stripBom (content) {
  // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
  if (Buffer.isBuffer(content)) content = content.toString('utf8')
  return content.replace(/^\uFEFF/, '')
}
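
// A small sketch of these helpers (values are illustrative):
//   stringify({ a: 1 }, { spaces: 2, EOL: '\r\n' })
//     -> '{\r\n  "a": 1\r\n}\r\n'  (every newline mapped to EOL, final EOL kept)
//   stripBom('\uFEFF{"a":1}')         -> '{"a":1}'
//   stripBom(Buffer.from('{"a":1}'))  -> '{"a":1}'  (buffers decoded as utf8)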
|
|
|
|
module.exports = { stringify, stripBom }
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5911:
|
|
/***/ ((module, exports) => {
|
|
|
|
exports = module.exports = SemVer
|
|
|
|
var debug
|
|
/* istanbul ignore next */
|
|
if (typeof process === 'object' &&
|
|
process.env &&
|
|
process.env.NODE_DEBUG &&
|
|
/\bsemver\b/i.test(process.env.NODE_DEBUG)) {
|
|
debug = function () {
|
|
var args = Array.prototype.slice.call(arguments, 0)
|
|
args.unshift('SEMVER')
|
|
console.log.apply(console, args)
|
|
}
|
|
} else {
|
|
debug = function () {}
|
|
}
|
|
|
|
// Note: this is the semver.org version of the spec that it implements
|
|
// Not necessarily the package version of this code.
|
|
exports.SEMVER_SPEC_VERSION = '2.0.0'
|
|
|
|
var MAX_LENGTH = 256
|
|
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
|
|
/* istanbul ignore next */ 9007199254740991
|
|
|
|
// Max safe segment length for coercion.
|
|
var MAX_SAFE_COMPONENT_LENGTH = 16
|
|
|
|
// The actual regexps go on exports.re
|
|
var re = exports.re = []
|
|
var src = exports.src = []
|
|
var t = exports.tokens = {}
|
|
var R = 0
|
|
|
|
function tok (n) {
|
|
t[n] = R++
|
|
}
|
|
|
|
// The following Regular Expressions can be used for tokenizing,
|
|
// validating, and parsing SemVer version strings.
|
|
|
|
// ## Numeric Identifier
|
|
// A single `0`, or a non-zero digit followed by zero or more digits.
|
|
|
|
tok('NUMERICIDENTIFIER')
|
|
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
|
|
tok('NUMERICIDENTIFIERLOOSE')
|
|
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
|
|
|
|
// ## Non-numeric Identifier
|
|
// Zero or more digits, followed by a letter or hyphen, and then zero or
|
|
// more letters, digits, or hyphens.
|
|
|
|
tok('NONNUMERICIDENTIFIER')
|
|
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
|
|
|
|
// ## Main Version
|
|
// Three dot-separated numeric identifiers.
|
|
|
|
tok('MAINVERSION')
|
|
src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIER] + ')'
|
|
|
|
tok('MAINVERSIONLOOSE')
|
|
src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
|
|
'(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'
|
|
|
|
// ## Pre-release Version Identifier
|
|
// A numeric identifier, or a non-numeric identifier.
|
|
|
|
tok('PRERELEASEIDENTIFIER')
|
|
src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
|
|
'|' + src[t.NONNUMERICIDENTIFIER] + ')'
|
|
|
|
tok('PRERELEASEIDENTIFIERLOOSE')
|
|
src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
|
|
'|' + src[t.NONNUMERICIDENTIFIER] + ')'
|
|
|
|
// ## Pre-release Version
|
|
// Hyphen, followed by one or more dot-separated pre-release version
|
|
// identifiers.
|
|
|
|
tok('PRERELEASE')
|
|
src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
|
|
'(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'
|
|
|
|
tok('PRERELEASELOOSE')
|
|
src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
|
|
'(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'
|
|
|
|
// ## Build Metadata Identifier
|
|
// Any combination of digits, letters, or hyphens.
|
|
|
|
tok('BUILDIDENTIFIER')
|
|
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
|
|
|
|
// ## Build Metadata
|
|
// Plus sign, followed by one or more period-separated build metadata
|
|
// identifiers.
|
|
|
|
tok('BUILD')
|
|
src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
|
|
'(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'
|
|
|
|
// ## Full Version String
|
|
// A main version, followed optionally by a pre-release version and
|
|
// build metadata.
|
|
|
|
// Note that the only major, minor, patch, and pre-release sections of
|
|
// the version string are capturing groups. The build metadata is not a
|
|
// capturing group, because it should not ever be used in version
|
|
// comparison.
|
|
|
|
tok('FULL')
|
|
tok('FULLPLAIN')
|
|
src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
|
|
src[t.PRERELEASE] + '?' +
|
|
src[t.BUILD] + '?'
|
|
|
|
src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'
|
|
|
|
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
|
|
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
|
|
// common in the npm registry.
|
|
tok('LOOSEPLAIN')
|
|
src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
|
|
src[t.PRERELEASELOOSE] + '?' +
|
|
src[t.BUILD] + '?'
|
|
|
|
tok('LOOSE')
|
|
src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'
|
|
|
|
tok('GTLT')
|
|
src[t.GTLT] = '((?:<|>)?=?)'
|
|
|
|
// Something like "2.*" or "1.2.x".
|
|
// Note that "x.x" is a valid xRange identifier, meaning "any version"
|
|
// Only the first item is strictly required.
|
|
tok('XRANGEIDENTIFIERLOOSE')
|
|
src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
|
|
tok('XRANGEIDENTIFIER')
|
|
src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'
|
|
|
|
tok('XRANGEPLAIN')
|
|
src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
|
|
'(?:' + src[t.PRERELEASE] + ')?' +
|
|
src[t.BUILD] + '?' +
|
|
')?)?'
|
|
|
|
tok('XRANGEPLAINLOOSE')
|
|
src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
|
|
'(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
|
|
'(?:' + src[t.PRERELEASELOOSE] + ')?' +
|
|
src[t.BUILD] + '?' +
|
|
')?)?'
|
|
|
|
tok('XRANGE')
|
|
src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
|
|
tok('XRANGELOOSE')
|
|
src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'
|
|
|
|
// Coercion.
|
|
// Extract anything that could conceivably be a part of a valid semver
|
|
tok('COERCE')
|
|
src[t.COERCE] = '(^|[^\\d])' +
|
|
'(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
|
|
'(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
|
|
'(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
|
|
'(?:$|[^\\d])'
|
|
tok('COERCERTL')
|
|
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
|
|
|
|
// Tilde ranges.
|
|
// Meaning is "reasonably at or greater than"
|
|
tok('LONETILDE')
|
|
src[t.LONETILDE] = '(?:~>?)'
|
|
|
|
tok('TILDETRIM')
|
|
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
|
|
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
|
|
var tildeTrimReplace = '$1~'
|
|
|
|
tok('TILDE')
|
|
src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
|
|
tok('TILDELOOSE')
|
|
src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'
|
|
|
|
// Caret ranges.
|
|
// Meaning is "at least and backwards compatible with"
|
|
tok('LONECARET')
|
|
src[t.LONECARET] = '(?:\\^)'
|
|
|
|
tok('CARETTRIM')
|
|
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
|
|
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
|
|
var caretTrimReplace = '$1^'
|
|
|
|
tok('CARET')
|
|
src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
|
|
tok('CARETLOOSE')
|
|
src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'
|
|
|
|
// A simple gt/lt/eq thing, or just "" to indicate "any version"
|
|
tok('COMPARATORLOOSE')
|
|
src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
|
|
tok('COMPARATOR')
|
|
src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'
|
|
|
|
// An expression to strip any whitespace between the gtlt and the thing
|
|
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
|
|
tok('COMPARATORTRIM')
|
|
src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
|
|
'\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'
|
|
|
|
// this one has to use the /g flag
|
|
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
|
|
var comparatorTrimReplace = '$1$2$3'
|
|
|
|
// Something like `1.2.3 - 1.2.4`
|
|
// Note that these all use the loose form, because they'll be
|
|
// checked against either the strict or loose comparator form
|
|
// later.
|
|
tok('HYPHENRANGE')
|
|
src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
|
|
'\\s+-\\s+' +
|
|
'(' + src[t.XRANGEPLAIN] + ')' +
|
|
'\\s*$'
|
|
|
|
tok('HYPHENRANGELOOSE')
|
|
src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
|
|
'\\s+-\\s+' +
|
|
'(' + src[t.XRANGEPLAINLOOSE] + ')' +
|
|
'\\s*$'
|
|
|
|
// Star ranges basically just allow anything at all.
|
|
tok('STAR')
|
|
src[t.STAR] = '(<|>)?=?\\s*\\*'
|
|
|
|
// Compile to actual regexp objects.
|
|
// All are flag-free, unless they were created above with a flag.
|
|
for (var i = 0; i < R; i++) {
|
|
debug(i, src[i])
|
|
if (!re[i]) {
|
|
re[i] = new RegExp(src[i])
|
|
}
|
|
}
|
|
|
|
exports.parse = parse
|
|
function parse (version, options) {
|
|
if (!options || typeof options !== 'object') {
|
|
options = {
|
|
loose: !!options,
|
|
includePrerelease: false
|
|
}
|
|
}
|
|
|
|
if (version instanceof SemVer) {
|
|
return version
|
|
}
|
|
|
|
if (typeof version !== 'string') {
|
|
return null
|
|
}
|
|
|
|
if (version.length > MAX_LENGTH) {
|
|
return null
|
|
}
|
|
|
|
var r = options.loose ? re[t.LOOSE] : re[t.FULL]
|
|
if (!r.test(version)) {
|
|
return null
|
|
}
|
|
|
|
try {
|
|
return new SemVer(version, options)
|
|
} catch (er) {
|
|
return null
|
|
}
|
|
}
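
// A quick sketch of parse() (illustrative values):
//   parse('1.2.3')         -> SemVer { major: 1, minor: 2, patch: 3, ... }
//   parse('v1.2.3-beta.1') -> SemVer with prerelease ['beta', 1]
//   parse('not a version') -> null (invalid input returns null, never throws)
//   parse('=1.2.3', { loose: true }) also accepts the sloppier '=' prefix.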
|
|
|
|
exports.valid = valid
|
|
function valid (version, options) {
|
|
var v = parse(version, options)
|
|
return v ? v.version : null
|
|
}
|
|
|
|
exports.clean = clean
|
|
function clean (version, options) {
|
|
var s = parse(version.trim().replace(/^[=v]+/, ''), options)
|
|
return s ? s.version : null
|
|
}
|
|
|
|
exports.SemVer = SemVer
|
|
|
|
function SemVer (version, options) {
|
|
if (!options || typeof options !== 'object') {
|
|
options = {
|
|
loose: !!options,
|
|
includePrerelease: false
|
|
}
|
|
}
|
|
if (version instanceof SemVer) {
|
|
if (version.loose === options.loose) {
|
|
return version
|
|
} else {
|
|
version = version.version
|
|
}
|
|
} else if (typeof version !== 'string') {
|
|
throw new TypeError('Invalid Version: ' + version)
|
|
}
|
|
|
|
if (version.length > MAX_LENGTH) {
|
|
throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
|
|
}
|
|
|
|
if (!(this instanceof SemVer)) {
|
|
return new SemVer(version, options)
|
|
}
|
|
|
|
debug('SemVer', version, options)
|
|
this.options = options
|
|
this.loose = !!options.loose
|
|
|
|
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
|
|
|
|
if (!m) {
|
|
throw new TypeError('Invalid Version: ' + version)
|
|
}
|
|
|
|
this.raw = version
|
|
|
|
// these are actually numbers
|
|
this.major = +m[1]
|
|
this.minor = +m[2]
|
|
this.patch = +m[3]
|
|
|
|
if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
|
|
throw new TypeError('Invalid major version')
|
|
}
|
|
|
|
if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
|
|
throw new TypeError('Invalid minor version')
|
|
}
|
|
|
|
if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
|
|
throw new TypeError('Invalid patch version')
|
|
}
|
|
|
|
// numberify any prerelease numeric ids
|
|
if (!m[4]) {
|
|
this.prerelease = []
|
|
} else {
|
|
this.prerelease = m[4].split('.').map(function (id) {
|
|
if (/^[0-9]+$/.test(id)) {
|
|
var num = +id
|
|
if (num >= 0 && num < MAX_SAFE_INTEGER) {
|
|
return num
|
|
}
|
|
}
|
|
return id
|
|
})
|
|
}
|
|
|
|
this.build = m[5] ? m[5].split('.') : []
|
|
this.format()
|
|
}
|
|
|
|
SemVer.prototype.format = function () {
|
|
this.version = this.major + '.' + this.minor + '.' + this.patch
|
|
if (this.prerelease.length) {
|
|
this.version += '-' + this.prerelease.join('.')
|
|
}
|
|
return this.version
|
|
}
|
|
|
|
SemVer.prototype.toString = function () {
|
|
return this.version
|
|
}
|
|
|
|
SemVer.prototype.compare = function (other) {
|
|
debug('SemVer.compare', this.version, this.options, other)
|
|
if (!(other instanceof SemVer)) {
|
|
other = new SemVer(other, this.options)
|
|
}
|
|
|
|
return this.compareMain(other) || this.comparePre(other)
|
|
}
|
|
|
|
SemVer.prototype.compareMain = function (other) {
|
|
if (!(other instanceof SemVer)) {
|
|
other = new SemVer(other, this.options)
|
|
}
|
|
|
|
return compareIdentifiers(this.major, other.major) ||
|
|
compareIdentifiers(this.minor, other.minor) ||
|
|
compareIdentifiers(this.patch, other.patch)
|
|
}
|
|
|
|
SemVer.prototype.comparePre = function (other) {
|
|
if (!(other instanceof SemVer)) {
|
|
other = new SemVer(other, this.options)
|
|
}
|
|
|
|
// NOT having a prerelease is > having one
|
|
if (this.prerelease.length && !other.prerelease.length) {
|
|
return -1
|
|
} else if (!this.prerelease.length && other.prerelease.length) {
|
|
return 1
|
|
} else if (!this.prerelease.length && !other.prerelease.length) {
|
|
return 0
|
|
}
|
|
|
|
var i = 0
|
|
do {
|
|
var a = this.prerelease[i]
|
|
var b = other.prerelease[i]
|
|
debug('prerelease compare', i, a, b)
|
|
if (a === undefined && b === undefined) {
|
|
return 0
|
|
} else if (b === undefined) {
|
|
return 1
|
|
} else if (a === undefined) {
|
|
return -1
|
|
} else if (a === b) {
|
|
continue
|
|
} else {
|
|
return compareIdentifiers(a, b)
|
|
}
|
|
} while (++i)
|
|
}
|
|
|
|
SemVer.prototype.compareBuild = function (other) {
|
|
if (!(other instanceof SemVer)) {
|
|
other = new SemVer(other, this.options)
|
|
}
|
|
|
|
var i = 0
|
|
do {
|
|
var a = this.build[i]
|
|
var b = other.build[i]
|
|
debug('prerelease compare', i, a, b)
|
|
if (a === undefined && b === undefined) {
|
|
return 0
|
|
} else if (b === undefined) {
|
|
return 1
|
|
} else if (a === undefined) {
|
|
return -1
|
|
} else if (a === b) {
|
|
continue
|
|
} else {
|
|
return compareIdentifiers(a, b)
|
|
}
|
|
} while (++i)
|
|
}
|
|
|
|
// preminor will bump the version up to the next minor release, and immediately
|
|
// down to pre-release. premajor and prepatch work the same way.
|
|
SemVer.prototype.inc = function (release, identifier) {
|
|
switch (release) {
|
|
case 'premajor':
|
|
this.prerelease.length = 0
|
|
this.patch = 0
|
|
this.minor = 0
|
|
this.major++
|
|
this.inc('pre', identifier)
|
|
break
|
|
case 'preminor':
|
|
this.prerelease.length = 0
|
|
this.patch = 0
|
|
this.minor++
|
|
this.inc('pre', identifier)
|
|
break
|
|
case 'prepatch':
|
|
// If this is already a prerelease, it will bump to the next version
|
|
// drop any prereleases that might already exist, since they are not
|
|
// relevant at this point.
|
|
this.prerelease.length = 0
|
|
this.inc('patch', identifier)
|
|
this.inc('pre', identifier)
|
|
break
|
|
// If the input is a non-prerelease version, this acts the same as
|
|
// prepatch.
|
|
case 'prerelease':
|
|
if (this.prerelease.length === 0) {
|
|
this.inc('patch', identifier)
|
|
}
|
|
this.inc('pre', identifier)
|
|
break
|
|
|
|
case 'major':
|
|
// If this is a pre-major version, bump up to the same major version.
|
|
// Otherwise increment major.
|
|
// 1.0.0-5 bumps to 1.0.0
|
|
// 1.1.0 bumps to 2.0.0
|
|
if (this.minor !== 0 ||
|
|
this.patch !== 0 ||
|
|
this.prerelease.length === 0) {
|
|
this.major++
|
|
}
|
|
this.minor = 0
|
|
this.patch = 0
|
|
this.prerelease = []
|
|
break
|
|
case 'minor':
|
|
// If this is a pre-minor version, bump up to the same minor version.
|
|
// Otherwise increment minor.
|
|
// 1.2.0-5 bumps to 1.2.0
|
|
// 1.2.1 bumps to 1.3.0
|
|
if (this.patch !== 0 || this.prerelease.length === 0) {
|
|
this.minor++
|
|
}
|
|
this.patch = 0
|
|
this.prerelease = []
|
|
break
|
|
case 'patch':
|
|
// If this is not a pre-release version, it will increment the patch.
|
|
// If it is a pre-release it will bump up to the same patch version.
|
|
// 1.2.0-5 patches to 1.2.0
|
|
// 1.2.0 patches to 1.2.1
|
|
if (this.prerelease.length === 0) {
|
|
this.patch++
|
|
}
|
|
this.prerelease = []
|
|
break
|
|
// This probably shouldn't be used publicly.
|
|
// 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
|
|
case 'pre':
|
|
if (this.prerelease.length === 0) {
|
|
this.prerelease = [0]
|
|
} else {
|
|
var i = this.prerelease.length
|
|
while (--i >= 0) {
|
|
if (typeof this.prerelease[i] === 'number') {
|
|
this.prerelease[i]++
|
|
i = -2
|
|
}
|
|
}
|
|
if (i === -1) {
|
|
// didn't increment anything
|
|
this.prerelease.push(0)
|
|
}
|
|
}
|
|
if (identifier) {
|
|
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
|
|
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
|
|
if (this.prerelease[0] === identifier) {
|
|
if (isNaN(this.prerelease[1])) {
|
|
this.prerelease = [identifier, 0]
|
|
}
|
|
} else {
|
|
this.prerelease = [identifier, 0]
|
|
}
|
|
}
|
|
break
|
|
|
|
default:
|
|
throw new Error('invalid increment argument: ' + release)
|
|
}
|
|
this.format()
|
|
this.raw = this.version
|
|
return this
|
|
}
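
// inc() in brief (results follow from the cases above; values illustrative):
//   new SemVer('1.2.3').inc('patch').version                     -> '1.2.4'
//   new SemVer('1.2.3').inc('preminor', 'beta').version          -> '1.3.0-beta.0'
//   new SemVer('1.3.0-beta.0').inc('prerelease', 'beta').version -> '1.3.0-beta.1'
//   new SemVer('1.3.0-beta.1').inc('minor').version              -> '1.3.0'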
|
|
|
|
exports.inc = inc
|
|
function inc (version, release, loose, identifier) {
|
|
if (typeof (loose) === 'string') {
|
|
identifier = loose
|
|
loose = undefined
|
|
}
|
|
|
|
try {
|
|
return new SemVer(version, loose).inc(release, identifier).version
|
|
} catch (er) {
|
|
return null
|
|
}
|
|
}
|
|
|
|
exports.diff = diff
|
|
function diff (version1, version2) {
|
|
if (eq(version1, version2)) {
|
|
return null
|
|
} else {
|
|
var v1 = parse(version1)
|
|
var v2 = parse(version2)
|
|
var prefix = ''
|
|
if (v1.prerelease.length || v2.prerelease.length) {
|
|
prefix = 'pre'
|
|
var defaultResult = 'prerelease'
|
|
}
|
|
for (var key in v1) {
|
|
if (key === 'major' || key === 'minor' || key === 'patch') {
|
|
if (v1[key] !== v2[key]) {
|
|
return prefix + key
|
|
}
|
|
}
|
|
}
|
|
return defaultResult // may be undefined
|
|
}
|
|
}
|
|
|
|
exports.compareIdentifiers = compareIdentifiers
|
|
|
|
var numeric = /^[0-9]+$/
|
|
function compareIdentifiers (a, b) {
|
|
var anum = numeric.test(a)
|
|
var bnum = numeric.test(b)
|
|
|
|
if (anum && bnum) {
|
|
a = +a
|
|
b = +b
|
|
}
|
|
|
|
return a === b ? 0
|
|
: (anum && !bnum) ? -1
|
|
: (bnum && !anum) ? 1
|
|
: a < b ? -1
|
|
: 1
|
|
}
|
|
|
|
exports.rcompareIdentifiers = rcompareIdentifiers
|
|
function rcompareIdentifiers (a, b) {
|
|
return compareIdentifiers(b, a)
|
|
}
|
|
|
|
exports.major = major
|
|
function major (a, loose) {
|
|
return new SemVer(a, loose).major
|
|
}
|
|
|
|
exports.minor = minor
|
|
function minor (a, loose) {
|
|
return new SemVer(a, loose).minor
|
|
}
|
|
|
|
exports.patch = patch
|
|
function patch (a, loose) {
|
|
return new SemVer(a, loose).patch
|
|
}
|
|
|
|
exports.compare = compare
|
|
function compare (a, b, loose) {
|
|
return new SemVer(a, loose).compare(new SemVer(b, loose))
|
|
}
|
|
|
|
exports.compareLoose = compareLoose
|
|
function compareLoose (a, b) {
|
|
return compare(a, b, true)
|
|
}
|
|
|
|
exports.compareBuild = compareBuild
|
|
function compareBuild (a, b, loose) {
|
|
var versionA = new SemVer(a, loose)
|
|
var versionB = new SemVer(b, loose)
|
|
return versionA.compare(versionB) || versionA.compareBuild(versionB)
|
|
}
|
|
|
|
exports.rcompare = rcompare
|
|
function rcompare (a, b, loose) {
|
|
return compare(b, a, loose)
|
|
}
|
|
|
|
exports.sort = sort
|
|
function sort (list, loose) {
|
|
return list.sort(function (a, b) {
|
|
return exports.compareBuild(a, b, loose)
|
|
})
|
|
}
|
|
|
|
exports.rsort = rsort
|
|
function rsort (list, loose) {
|
|
return list.sort(function (a, b) {
|
|
return exports.compareBuild(b, a, loose)
|
|
})
|
|
}
|
|
|
|
exports.gt = gt
|
|
function gt (a, b, loose) {
|
|
return compare(a, b, loose) > 0
|
|
}
|
|
|
|
exports.lt = lt
|
|
function lt (a, b, loose) {
|
|
return compare(a, b, loose) < 0
|
|
}
|
|
|
|
exports.eq = eq
|
|
function eq (a, b, loose) {
|
|
return compare(a, b, loose) === 0
|
|
}
|
|
|
|
exports.neq = neq
|
|
function neq (a, b, loose) {
|
|
return compare(a, b, loose) !== 0
|
|
}
|
|
|
|
exports.gte = gte
|
|
function gte (a, b, loose) {
|
|
return compare(a, b, loose) >= 0
|
|
}
|
|
|
|
exports.lte = lte
|
|
function lte (a, b, loose) {
|
|
return compare(a, b, loose) <= 0
|
|
}
|
|
|
|
exports.cmp = cmp
|
|
function cmp (a, op, b, loose) {
|
|
switch (op) {
|
|
case '===':
|
|
if (typeof a === 'object')
|
|
a = a.version
|
|
if (typeof b === 'object')
|
|
b = b.version
|
|
return a === b
|
|
|
|
case '!==':
|
|
if (typeof a === 'object')
|
|
a = a.version
|
|
if (typeof b === 'object')
|
|
b = b.version
|
|
return a !== b
|
|
|
|
case '':
|
|
case '=':
|
|
case '==':
|
|
return eq(a, b, loose)
|
|
|
|
case '!=':
|
|
return neq(a, b, loose)
|
|
|
|
case '>':
|
|
return gt(a, b, loose)
|
|
|
|
case '>=':
|
|
return gte(a, b, loose)
|
|
|
|
case '<':
|
|
return lt(a, b, loose)
|
|
|
|
case '<=':
|
|
return lte(a, b, loose)
|
|
|
|
default:
|
|
throw new TypeError('Invalid operator: ' + op)
|
|
}
|
|
}
|
|
|
|
exports.Comparator = Comparator
|
|
function Comparator (comp, options) {
|
|
if (!options || typeof options !== 'object') {
|
|
options = {
|
|
loose: !!options,
|
|
includePrerelease: false
|
|
}
|
|
}
|
|
|
|
if (comp instanceof Comparator) {
|
|
if (comp.loose === !!options.loose) {
|
|
return comp
|
|
} else {
|
|
comp = comp.value
|
|
}
|
|
}
|
|
|
|
if (!(this instanceof Comparator)) {
|
|
return new Comparator(comp, options)
|
|
}
|
|
|
|
debug('comparator', comp, options)
|
|
this.options = options
|
|
this.loose = !!options.loose
|
|
this.parse(comp)
|
|
|
|
if (this.semver === ANY) {
|
|
this.value = ''
|
|
} else {
|
|
this.value = this.operator + this.semver.version
|
|
}
|
|
|
|
debug('comp', this)
|
|
}
|
|
|
|
var ANY = {}
|
|
Comparator.prototype.parse = function (comp) {
|
|
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
|
|
var m = comp.match(r)
|
|
|
|
if (!m) {
|
|
throw new TypeError('Invalid comparator: ' + comp)
|
|
}
|
|
|
|
this.operator = m[1] !== undefined ? m[1] : ''
|
|
if (this.operator === '=') {
|
|
this.operator = ''
|
|
}
|
|
|
|
// if it literally is just '>' or '' then allow anything.
|
|
if (!m[2]) {
|
|
this.semver = ANY
|
|
} else {
|
|
this.semver = new SemVer(m[2], this.options.loose)
|
|
}
|
|
}
|
|
|
|
Comparator.prototype.toString = function () {
|
|
return this.value
|
|
}
|
|
|
|
Comparator.prototype.test = function (version) {
|
|
debug('Comparator.test', version, this.options.loose)
|
|
|
|
if (this.semver === ANY || version === ANY) {
|
|
return true
|
|
}
|
|
|
|
if (typeof version === 'string') {
|
|
try {
|
|
version = new SemVer(version, this.options)
|
|
} catch (er) {
|
|
return false
|
|
}
|
|
}
|
|
|
|
return cmp(version, this.operator, this.semver, this.options)
|
|
}
|
|
|
|
Comparator.prototype.intersects = function (comp, options) {
|
|
if (!(comp instanceof Comparator)) {
|
|
throw new TypeError('a Comparator is required')
|
|
}
|
|
|
|
if (!options || typeof options !== 'object') {
|
|
options = {
|
|
loose: !!options,
|
|
includePrerelease: false
|
|
}
|
|
}
|
|
|
|
var rangeTmp
|
|
|
|
if (this.operator === '') {
|
|
if (this.value === '') {
|
|
return true
|
|
}
|
|
rangeTmp = new Range(comp.value, options)
|
|
return satisfies(this.value, rangeTmp, options)
|
|
} else if (comp.operator === '') {
|
|
if (comp.value === '') {
|
|
return true
|
|
}
|
|
rangeTmp = new Range(this.value, options)
|
|
return satisfies(comp.semver, rangeTmp, options)
|
|
}
|
|
|
|
var sameDirectionIncreasing =
|
|
(this.operator === '>=' || this.operator === '>') &&
|
|
(comp.operator === '>=' || comp.operator === '>')
|
|
var sameDirectionDecreasing =
|
|
(this.operator === '<=' || this.operator === '<') &&
|
|
(comp.operator === '<=' || comp.operator === '<')
|
|
var sameSemVer = this.semver.version === comp.semver.version
|
|
var differentDirectionsInclusive =
|
|
(this.operator === '>=' || this.operator === '<=') &&
|
|
(comp.operator === '>=' || comp.operator === '<=')
|
|
var oppositeDirectionsLessThan =
|
|
cmp(this.semver, '<', comp.semver, options) &&
|
|
((this.operator === '>=' || this.operator === '>') &&
|
|
(comp.operator === '<=' || comp.operator === '<'))
|
|
var oppositeDirectionsGreaterThan =
|
|
cmp(this.semver, '>', comp.semver, options) &&
|
|
((this.operator === '<=' || this.operator === '<') &&
|
|
(comp.operator === '>=' || comp.operator === '>'))
|
|
|
|
return sameDirectionIncreasing || sameDirectionDecreasing ||
|
|
(sameSemVer && differentDirectionsInclusive) ||
|
|
oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
|
|
}
|
|
|
|
exports.Range = Range
|
|
function Range (range, options) {
|
|
if (!options || typeof options !== 'object') {
|
|
options = {
|
|
loose: !!options,
|
|
includePrerelease: false
|
|
}
|
|
}
|
|
|
|
if (range instanceof Range) {
|
|
if (range.loose === !!options.loose &&
|
|
range.includePrerelease === !!options.includePrerelease) {
|
|
return range
|
|
} else {
|
|
return new Range(range.raw, options)
|
|
}
|
|
}
|
|
|
|
if (range instanceof Comparator) {
|
|
return new Range(range.value, options)
|
|
}
|
|
|
|
if (!(this instanceof Range)) {
|
|
return new Range(range, options)
|
|
}
|
|
|
|
this.options = options
|
|
this.loose = !!options.loose
|
|
this.includePrerelease = !!options.includePrerelease
|
|
|
|
// First, split based on boolean or ||
|
|
this.raw = range
|
|
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
|
|
return this.parseRange(range.trim())
|
|
}, this).filter(function (c) {
|
|
// throw out any that are not relevant for whatever reason
|
|
return c.length
|
|
})
|
|
|
|
if (!this.set.length) {
|
|
throw new TypeError('Invalid SemVer Range: ' + range)
|
|
}
|
|
|
|
this.format()
|
|
}
|
|
|
|
Range.prototype.format = function () {
|
|
this.range = this.set.map(function (comps) {
|
|
return comps.join(' ').trim()
|
|
}).join('||').trim()
|
|
return this.range
|
|
}
|
|
|
|
Range.prototype.toString = function () {
|
|
return this.range
|
|
}
|
|
|
|
Range.prototype.parseRange = function (range) {
|
|
var loose = this.options.loose
|
|
range = range.trim()
|
|
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
|
|
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
|
|
range = range.replace(hr, hyphenReplace)
|
|
debug('hyphen replace', range)
|
|
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
|
|
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
|
|
debug('comparator trim', range, re[t.COMPARATORTRIM])
|
|
|
|
// `~ 1.2.3` => `~1.2.3`
|
|
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
|
|
|
|
// `^ 1.2.3` => `^1.2.3`
|
|
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
|
|
|
|
// normalize spaces
|
|
range = range.split(/\s+/).join(' ')
|
|
|
|
// At this point, the range is completely trimmed and
|
|
// ready to be split into comparators.
|
|
|
|
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
|
|
var set = range.split(' ').map(function (comp) {
|
|
return parseComparator(comp, this.options)
|
|
}, this).join(' ').split(/\s+/)
|
|
if (this.options.loose) {
|
|
// in loose mode, throw out any that are not valid comparators
|
|
set = set.filter(function (comp) {
|
|
return !!comp.match(compRe)
|
|
})
|
|
}
|
|
set = set.map(function (comp) {
|
|
return new Comparator(comp, this.options)
|
|
}, this)
|
|
|
|
return set
|
|
}
|
|
|
|
Range.prototype.intersects = function (range, options) {
|
|
if (!(range instanceof Range)) {
|
|
throw new TypeError('a Range is required')
|
|
}
|
|
|
|
return this.set.some(function (thisComparators) {
|
|
return (
|
|
isSatisfiable(thisComparators, options) &&
|
|
range.set.some(function (rangeComparators) {
|
|
return (
|
|
isSatisfiable(rangeComparators, options) &&
|
|
thisComparators.every(function (thisComparator) {
|
|
return rangeComparators.every(function (rangeComparator) {
|
|
return thisComparator.intersects(rangeComparator, options)
|
|
})
|
|
})
|
|
)
|
|
})
|
|
)
|
|
})
|
|
}
|
|
|
|
// take a set of comparators and determine whether there
|
|
// exists a version which can satisfy it
|
|
function isSatisfiable (comparators, options) {
|
|
var result = true
|
|
var remainingComparators = comparators.slice()
|
|
var testComparator = remainingComparators.pop()
|
|
|
|
while (result && remainingComparators.length) {
|
|
result = remainingComparators.every(function (otherComparator) {
|
|
return testComparator.intersects(otherComparator, options)
|
|
})
|
|
|
|
testComparator = remainingComparators.pop()
|
|
}
|
|
|
|
return result
|
|
}
|
|
|
|
// Mostly just for testing and legacy API reasons
|
|
exports.toComparators = toComparators
|
|
function toComparators (range, options) {
|
|
return new Range(range, options).set.map(function (comp) {
|
|
return comp.map(function (c) {
|
|
return c.value
|
|
}).join(' ').trim().split(' ')
|
|
})
|
|
}
|
|
|
|
// comprised of xranges, tildes, stars, and gtlt's at this point.
|
|
// already replaced the hyphen ranges
|
|
// turn into a set of JUST comparators.
|
|
function parseComparator (comp, options) {
|
|
debug('comp', comp, options)
|
|
comp = replaceCarets(comp, options)
|
|
debug('caret', comp)
|
|
comp = replaceTildes(comp, options)
|
|
debug('tildes', comp)
|
|
comp = replaceXRanges(comp, options)
|
|
debug('xrange', comp)
|
|
comp = replaceStars(comp, options)
|
|
debug('stars', comp)
|
|
return comp
|
|
}
|
|
|
|
function isX (id) {
|
|
return !id || id.toLowerCase() === 'x' || id === '*'
|
|
}
|
|
|
|
// ~, ~> --> * (any, kinda silly)
|
|
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
|
|
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
|
|
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
|
|
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
|
|
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
|
|
function replaceTildes (comp, options) {
|
|
return comp.trim().split(/\s+/).map(function (comp) {
|
|
return replaceTilde(comp, options)
|
|
}).join(' ')
|
|
}
|
|
|
|
function replaceTilde (comp, options) {
|
|
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
|
|
return comp.replace(r, function (_, M, m, p, pr) {
|
|
debug('tilde', comp, _, M, m, p, pr)
|
|
var ret
|
|
|
|
if (isX(M)) {
|
|
ret = ''
|
|
} else if (isX(m)) {
|
|
ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
|
|
} else if (isX(p)) {
|
|
// ~1.2 == >=1.2.0 <1.3.0
|
|
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
|
|
} else if (pr) {
|
|
debug('replaceTilde pr', pr)
|
|
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
|
|
' <' + M + '.' + (+m + 1) + '.0'
|
|
} else {
|
|
// ~1.2.3 == >=1.2.3 <1.3.0
|
|
ret = '>=' + M + '.' + m + '.' + p +
|
|
' <' + M + '.' + (+m + 1) + '.0'
|
|
}
|
|
|
|
debug('tilde return', ret)
|
|
return ret
|
|
})
|
|
}
|
|
|
|
// ^ --> * (any, kinda silly)
|
|
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
|
|
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
|
|
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
|
|
// ^1.2.3 --> >=1.2.3 <2.0.0
|
|
// ^1.2.0 --> >=1.2.0 <2.0.0
|
|
function replaceCarets (comp, options) {
|
|
return comp.trim().split(/\s+/).map(function (comp) {
|
|
return replaceCaret(comp, options)
|
|
}).join(' ')
|
|
}
|
|
|
|
function replaceCaret (comp, options) {
|
|
debug('caret', comp, options)
|
|
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
|
|
return comp.replace(r, function (_, M, m, p, pr) {
|
|
debug('caret', comp, _, M, m, p, pr)
|
|
var ret
|
|
|
|
if (isX(M)) {
|
|
ret = ''
|
|
} else if (isX(m)) {
|
|
ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
|
|
} else if (isX(p)) {
|
|
if (M === '0') {
|
|
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
|
|
} else {
|
|
ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
|
|
}
|
|
} else if (pr) {
|
|
debug('replaceCaret pr', pr)
|
|
if (M === '0') {
|
|
if (m === '0') {
|
|
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
|
|
' <' + M + '.' + m + '.' + (+p + 1)
|
|
} else {
|
|
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
|
|
' <' + M + '.' + (+m + 1) + '.0'
|
|
}
|
|
} else {
|
|
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
|
|
' <' + (+M + 1) + '.0.0'
|
|
}
|
|
} else {
|
|
debug('no pr')
|
|
if (M === '0') {
|
|
if (m === '0') {
|
|
ret = '>=' + M + '.' + m + '.' + p +
|
|
' <' + M + '.' + m + '.' + (+p + 1)
|
|
} else {
|
|
ret = '>=' + M + '.' + m + '.' + p +
|
|
' <' + M + '.' + (+m + 1) + '.0'
|
|
}
|
|
} else {
|
|
ret = '>=' + M + '.' + m + '.' + p +
|
|
' <' + (+M + 1) + '.0.0'
|
|
}
|
|
}
|
|
|
|
debug('caret return', ret)
|
|
return ret
|
|
})
|
|
}
|
|
|
|
function replaceXRanges (comp, options) {
|
|
debug('replaceXRanges', comp, options)
|
|
return comp.split(/\s+/).map(function (comp) {
|
|
return replaceXRange(comp, options)
|
|
}).join(' ')
|
|
}
|
|
|
|
function replaceXRange (comp, options) {
|
|
comp = comp.trim()
|
|
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
|
|
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
|
|
debug('xRange', comp, ret, gtlt, M, m, p, pr)
|
|
var xM = isX(M)
|
|
var xm = xM || isX(m)
|
|
var xp = xm || isX(p)
|
|
var anyX = xp
|
|
|
|
if (gtlt === '=' && anyX) {
|
|
gtlt = ''
|
|
}
|
|
|
|
// if we're including prereleases in the match, then we need
|
|
// to fix this to -0, the lowest possible prerelease value
|
|
pr = options.includePrerelease ? '-0' : ''
|
|
|
|
if (xM) {
|
|
if (gtlt === '>' || gtlt === '<') {
|
|
// nothing is allowed
|
|
ret = '<0.0.0-0'
|
|
} else {
|
|
// nothing is forbidden
|
|
ret = '*'
|
|
}
|
|
} else if (gtlt && anyX) {
|
|
// we know patch is an x, because we have any x at all.
|
|
// replace X with 0
|
|
if (xm) {
|
|
m = 0
|
|
}
|
|
p = 0
|
|
|
|
if (gtlt === '>') {
|
|
// >1 => >=2.0.0
|
|
// >1.2 => >=1.3.0
|
|
// >1.2.3 => >= 1.2.4
|
|
gtlt = '>='
|
|
if (xm) {
|
|
M = +M + 1
|
|
m = 0
|
|
p = 0
|
|
} else {
|
|
m = +m + 1
|
|
p = 0
|
|
}
|
|
} else if (gtlt === '<=') {
|
|
// <=0.7.x is actually <0.8.0, since any 0.7.x should
|
|
// pass. Similarly, <=7.x is actually <8.0.0, etc.
|
|
gtlt = '<'
|
|
if (xm) {
|
|
M = +M + 1
|
|
} else {
|
|
m = +m + 1
|
|
}
|
|
}
|
|
|
|
ret = gtlt + M + '.' + m + '.' + p + pr
|
|
} else if (xm) {
|
|
ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr
|
|
} else if (xp) {
|
|
ret = '>=' + M + '.' + m + '.0' + pr +
|
|
' <' + M + '.' + (+m + 1) + '.0' + pr
|
|
}
|
|
|
|
debug('xRange return', ret)
|
|
|
|
return ret
|
|
})
|
|
}
|
|
|
|
// Because * is AND-ed with everything else in the comparator,
|
|
// and '' means "any version", just remove the *s entirely.
|
|
function replaceStars (comp, options) {
|
|
debug('replaceStars', comp, options)
|
|
// Looseness is ignored here. star is always as loose as it gets!
|
|
return comp.trim().replace(re[t.STAR], '')
|
|
}
|
|
|
|
// This function is passed to string.replace(re[t.HYPHENRANGE])
|
|
// M, m, patch, prerelease, build
|
|
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
|
|
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
|
|
// 1.2 - 3.4 => >=1.2.0 <3.5.0
|
|
function hyphenReplace ($0,
|
|
from, fM, fm, fp, fpr, fb,
|
|
to, tM, tm, tp, tpr, tb) {
|
|
if (isX(fM)) {
|
|
from = ''
|
|
} else if (isX(fm)) {
|
|
from = '>=' + fM + '.0.0'
|
|
} else if (isX(fp)) {
|
|
from = '>=' + fM + '.' + fm + '.0'
|
|
} else {
|
|
from = '>=' + from
|
|
}
|
|
|
|
if (isX(tM)) {
|
|
to = ''
|
|
} else if (isX(tm)) {
|
|
to = '<' + (+tM + 1) + '.0.0'
|
|
} else if (isX(tp)) {
|
|
to = '<' + tM + '.' + (+tm + 1) + '.0'
|
|
} else if (tpr) {
|
|
to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
|
|
} else {
|
|
to = '<=' + to
|
|
}
|
|
|
|
return (from + ' ' + to).trim()
|
|
}
|
|
|
|
// if ANY of the sets match ALL of its comparators, then pass
|
|
Range.prototype.test = function (version) {
|
|
if (!version) {
|
|
return false
|
|
}
|
|
|
|
if (typeof version === 'string') {
|
|
try {
|
|
version = new SemVer(version, this.options)
|
|
} catch (er) {
|
|
return false
|
|
}
|
|
}
|
|
|
|
for (var i = 0; i < this.set.length; i++) {
|
|
if (testSet(this.set[i], version, this.options)) {
|
|
return true
|
|
}
|
|
}
|
|
return false
|
|
}
|
|
|
|
function testSet (set, version, options) {
|
|
for (var i = 0; i < set.length; i++) {
|
|
if (!set[i].test(version)) {
|
|
return false
|
|
}
|
|
}
|
|
|
|
if (version.prerelease.length && !options.includePrerelease) {
|
|
// Find the set of versions that are allowed to have prereleases
|
|
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
|
|
// That should allow `1.2.3-pr.2` to pass.
|
|
// However, `1.2.4-alpha.notready` should NOT be allowed,
|
|
// even though it's within the range set by the comparators.
|
|
for (i = 0; i < set.length; i++) {
|
|
debug(set[i].semver)
|
|
if (set[i].semver === ANY) {
|
|
continue
|
|
}
|
|
|
|
if (set[i].semver.prerelease.length > 0) {
|
|
var allowed = set[i].semver
|
|
if (allowed.major === version.major &&
|
|
allowed.minor === version.minor &&
|
|
allowed.patch === version.patch) {
|
|
return true
|
|
}
|
|
}
|
|
}
|
|
|
|
// Version has a -pre, but it's not one of the ones we like.
|
|
return false
|
|
}
|
|
|
|
return true
|
|
}
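
// The prerelease rule above, illustrated with the exported satisfies():
//   satisfies('1.2.3-pr.2', '^1.2.3-pr.1')  -> true  (same 1.2.3 tuple)
//   satisfies('1.2.4-alpha', '^1.2.3-pr.1') -> false (prerelease of another patch)
//   satisfies('1.2.4-alpha', '^1.2.3-pr.1', { includePrerelease: true }) -> true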
|
|
|
|
exports.satisfies = satisfies
|
|
function satisfies (version, range, options) {
|
|
try {
|
|
range = new Range(range, options)
|
|
} catch (er) {
|
|
return false
|
|
}
|
|
return range.test(version)
|
|
}
|
|
|
|
exports.maxSatisfying = maxSatisfying
|
|
function maxSatisfying (versions, range, options) {
|
|
var max = null
|
|
var maxSV = null
|
|
try {
|
|
var rangeObj = new Range(range, options)
|
|
} catch (er) {
|
|
return null
|
|
}
|
|
versions.forEach(function (v) {
|
|
if (rangeObj.test(v)) {
|
|
// satisfies(v, range, options)
|
|
if (!max || maxSV.compare(v) === -1) {
|
|
// compare(max, v, true)
|
|
max = v
|
|
maxSV = new SemVer(max, options)
|
|
}
|
|
}
|
|
})
|
|
return max
|
|
}
|
|
|
|
exports.minSatisfying = minSatisfying
|
|
function minSatisfying (versions, range, options) {
|
|
var min = null
|
|
var minSV = null
|
|
try {
|
|
var rangeObj = new Range(range, options)
|
|
} catch (er) {
|
|
return null
|
|
}
|
|
versions.forEach(function (v) {
|
|
if (rangeObj.test(v)) {
|
|
// satisfies(v, range, options)
|
|
if (!min || minSV.compare(v) === 1) {
|
|
// compare(min, v, true)
|
|
min = v
|
|
minSV = new SemVer(min, options)
|
|
}
|
|
}
|
|
})
|
|
return min
|
|
}
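// Illustrative usage of maxSatisfying()/minSatisfying() (comment sketch only):
/*
  maxSatisfying(['1.2.3', '1.2.4', '2.0.0'], '~1.2.0') // '1.2.4'
  minSatisfying(['1.2.3', '1.2.4', '2.0.0'], '~1.2.0') // '1.2.3'
  maxSatisfying(['0.9.0'], '^1.0.0')                   // null - nothing matches
*/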
|
|
|
|
exports.minVersion = minVersion
|
|
function minVersion (range, loose) {
|
|
range = new Range(range, loose)
|
|
|
|
var minver = new SemVer('0.0.0')
|
|
if (range.test(minver)) {
|
|
return minver
|
|
}
|
|
|
|
minver = new SemVer('0.0.0-0')
|
|
if (range.test(minver)) {
|
|
return minver
|
|
}
|
|
|
|
minver = null
|
|
for (var i = 0; i < range.set.length; ++i) {
|
|
var comparators = range.set[i]
|
|
|
|
comparators.forEach(function (comparator) {
|
|
// Clone to avoid manipulating the comparator's semver object.
|
|
var compver = new SemVer(comparator.semver.version)
|
|
switch (comparator.operator) {
|
|
case '>':
|
|
if (compver.prerelease.length === 0) {
|
|
compver.patch++
|
|
} else {
|
|
compver.prerelease.push(0)
|
|
}
|
|
compver.raw = compver.format()
|
|
/* fallthrough */
|
|
case '':
|
|
case '>=':
|
|
if (!minver || gt(minver, compver)) {
|
|
minver = compver
|
|
}
|
|
break
|
|
case '<':
|
|
case '<=':
|
|
/* Ignore maximum versions */
|
|
break
|
|
/* istanbul ignore next */
|
|
default:
|
|
throw new Error('Unexpected operation: ' + comparator.operator)
|
|
}
|
|
})
|
|
}
|
|
|
|
if (minver && range.test(minver)) {
|
|
return minver
|
|
}
|
|
|
|
return null
|
|
}
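// Illustrative usage of minVersion() (comment sketch only): it returns the
// lowest version that could possibly satisfy the range, as a SemVer instance.
/*
  minVersion('^1.2.3').version // '1.2.3'
  minVersion('>1.2.3').version // '1.2.4' - '>' bumps the patch, per the case above
  minVersion('<2.0.0').version // '0.0.0' - only an upper bound is given
*/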
|
|
|
|
exports.validRange = validRange
|
|
function validRange (range, options) {
|
|
try {
|
|
// Return '*' instead of '' so that truthiness works.
|
|
// This will throw if it's invalid anyway
|
|
return new Range(range, options).range || '*'
|
|
} catch (er) {
|
|
return null
|
|
}
|
|
}
|
|
|
|
// Determine if version is less than all the versions possible in the range
|
|
exports.ltr = ltr
|
|
function ltr (version, range, options) {
|
|
return outside(version, range, '<', options)
|
|
}
|
|
|
|
// Determine if version is greater than all the versions possible in the range.
|
|
exports.gtr = gtr
|
|
function gtr (version, range, options) {
|
|
return outside(version, range, '>', options)
|
|
}
|
|
|
|
exports.outside = outside
|
|
function outside (version, range, hilo, options) {
|
|
version = new SemVer(version, options)
|
|
range = new Range(range, options)
|
|
|
|
var gtfn, ltefn, ltfn, comp, ecomp
|
|
switch (hilo) {
|
|
case '>':
|
|
gtfn = gt
|
|
ltefn = lte
|
|
ltfn = lt
|
|
comp = '>'
|
|
ecomp = '>='
|
|
break
|
|
case '<':
|
|
gtfn = lt
|
|
ltefn = gte
|
|
ltfn = gt
|
|
comp = '<'
|
|
ecomp = '<='
|
|
break
|
|
default:
|
|
throw new TypeError('Must provide a hilo val of "<" or ">"')
|
|
}
|
|
|
|
// If it satisfies the range it is not outside
|
|
if (satisfies(version, range, options)) {
|
|
return false
|
|
}
|
|
|
|
// From now on, variable terms are as if we're in "gtr" mode;
// note that everything is flipped for the "ltr" function.
|
|
|
|
for (var i = 0; i < range.set.length; ++i) {
|
|
var comparators = range.set[i]
|
|
|
|
var high = null
|
|
var low = null
|
|
|
|
comparators.forEach(function (comparator) {
|
|
if (comparator.semver === ANY) {
|
|
comparator = new Comparator('>=0.0.0')
|
|
}
|
|
high = high || comparator
|
|
low = low || comparator
|
|
if (gtfn(comparator.semver, high.semver, options)) {
|
|
high = comparator
|
|
} else if (ltfn(comparator.semver, low.semver, options)) {
|
|
low = comparator
|
|
}
|
|
})
|
|
|
|
// If the edge version comparator has an operator then our version
|
|
// isn't outside it
|
|
if (high.operator === comp || high.operator === ecomp) {
|
|
return false
|
|
}
|
|
|
|
// If the lowest version comparator has an operator and our version
|
|
// is less than it then it isn't higher than the range
|
|
if ((!low.operator || low.operator === comp) &&
|
|
ltefn(version, low.semver)) {
|
|
return false
|
|
} else if (low.operator === ecomp && ltfn(version, low.semver)) {
|
|
return false
|
|
}
|
|
}
|
|
return true
|
|
}
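// Illustrative usage of gtr()/ltr()/outside() (comment sketch only):
/*
  gtr('2.0.1', '^1.2.3')          // true  - above every version in the range
  ltr('1.0.0', '^1.2.3')          // true  - below every version in the range
  outside('1.5.0', '^1.2.3', '>') // false - 1.5.0 satisfies the range
*/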
|
|
|
|
exports.prerelease = prerelease
|
|
function prerelease (version, options) {
|
|
var parsed = parse(version, options)
|
|
return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
|
|
}
|
|
|
|
exports.intersects = intersects
|
|
function intersects (r1, r2, options) {
|
|
r1 = new Range(r1, options)
|
|
r2 = new Range(r2, options)
|
|
return r1.intersects(r2)
|
|
}
|
|
|
|
exports.coerce = coerce
|
|
function coerce (version, options) {
|
|
if (version instanceof SemVer) {
|
|
return version
|
|
}
|
|
|
|
if (typeof version === 'number') {
|
|
version = String(version)
|
|
}
|
|
|
|
if (typeof version !== 'string') {
|
|
return null
|
|
}
|
|
|
|
options = options || {}
|
|
|
|
var match = null
|
|
if (!options.rtl) {
|
|
match = version.match(re[t.COERCE])
|
|
} else {
|
|
// Find the right-most coercible string that does not share
|
|
// a terminus with a more left-ward coercible string.
|
|
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
|
|
//
|
|
// Walk through the string checking with a /g regexp
|
|
// Manually set the index so as to pick up overlapping matches.
|
|
// Stop when we get a match that ends at the string end, since no
|
|
// coercible string can be more right-ward without the same terminus.
|
|
var next
|
|
while ((next = re[t.COERCERTL].exec(version)) &&
|
|
(!match || match.index + match[0].length !== version.length)
|
|
) {
|
|
if (!match ||
|
|
next.index + next[0].length !== match.index + match[0].length) {
|
|
match = next
|
|
}
|
|
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
|
|
}
|
|
// leave it in a clean state
|
|
re[t.COERCERTL].lastIndex = -1
|
|
}
|
|
|
|
if (match === null) {
|
|
return null
|
|
}
|
|
|
|
return parse(match[2] +
|
|
'.' + (match[3] || '0') +
|
|
'.' + (match[4] || '0'), options)
|
|
}
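// Illustrative usage of coerce() (comment sketch only): it extracts the first
// (or, with rtl, the right-most) major[.minor[.patch]] run from a string.
/*
  coerce('v2').version                     // '2.0.0'
  coerce('version 1.2.3 final').version    // '1.2.3'
  coerce('1.2.3.4', { rtl: true }).version // '2.3.4' (see the comment above)
*/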
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4294:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
module.exports = __nccwpck_require__(4219);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4219:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
var net = __nccwpck_require__(1808);
|
|
var tls = __nccwpck_require__(4404);
|
|
var http = __nccwpck_require__(3685);
|
|
var https = __nccwpck_require__(5687);
|
|
var events = __nccwpck_require__(2361);
|
|
var assert = __nccwpck_require__(9491);
|
|
var util = __nccwpck_require__(3837);
|
|
|
|
|
|
exports.httpOverHttp = httpOverHttp;
|
|
exports.httpsOverHttp = httpsOverHttp;
|
|
exports.httpOverHttps = httpOverHttps;
|
|
exports.httpsOverHttps = httpsOverHttps;
|
|
|
|
|
|
function httpOverHttp(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = http.request;
|
|
return agent;
|
|
}
|
|
|
|
function httpsOverHttp(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = http.request;
|
|
agent.createSocket = createSecureSocket;
|
|
agent.defaultPort = 443;
|
|
return agent;
|
|
}
|
|
|
|
function httpOverHttps(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = https.request;
|
|
return agent;
|
|
}
|
|
|
|
function httpsOverHttps(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = https.request;
|
|
agent.createSocket = createSecureSocket;
|
|
agent.defaultPort = 443;
|
|
return agent;
|
|
}
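// Illustrative usage of the tunnel agents above (comment sketch only; the proxy
// host/port are placeholders): the agent issues an HTTP CONNECT to the proxy
// and hands the tunneled socket to the real request.
/*
  var agent = httpsOverHttp({
    proxy: { host: 'proxy.example.com', port: 3128 } // hypothetical proxy
  });
  https.request({ host: 'dl.google.com', port: 443, agent: agent }, function (res) {
    res.resume();
  }).end();
*/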
|
|
|
|
|
|
function TunnelingAgent(options) {
|
|
var self = this;
|
|
self.options = options || {};
|
|
self.proxyOptions = self.options.proxy || {};
|
|
self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
|
|
self.requests = [];
|
|
self.sockets = [];
|
|
|
|
self.on('free', function onFree(socket, host, port, localAddress) {
|
|
var options = toOptions(host, port, localAddress);
|
|
for (var i = 0, len = self.requests.length; i < len; ++i) {
|
|
var pending = self.requests[i];
|
|
if (pending.host === options.host && pending.port === options.port) {
|
|
// A queued request for the same origin server was found;
// reuse this connection for it.
|
|
self.requests.splice(i, 1);
|
|
pending.request.onSocket(socket);
|
|
return;
|
|
}
|
|
}
|
|
socket.destroy();
|
|
self.removeSocket(socket);
|
|
});
|
|
}
|
|
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
|
|
var self = this;
|
|
var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
|
|
|
|
if (self.sockets.length >= this.maxSockets) {
|
|
// We are over limit so we'll add it to the queue.
|
|
self.requests.push(options);
|
|
return;
|
|
}
|
|
|
|
// If we are under maxSockets create a new one.
|
|
self.createSocket(options, function(socket) {
|
|
socket.on('free', onFree);
|
|
socket.on('close', onCloseOrRemove);
|
|
socket.on('agentRemove', onCloseOrRemove);
|
|
req.onSocket(socket);
|
|
|
|
function onFree() {
|
|
self.emit('free', socket, options);
|
|
}
|
|
|
|
function onCloseOrRemove(err) {
|
|
self.removeSocket(socket);
|
|
socket.removeListener('free', onFree);
|
|
socket.removeListener('close', onCloseOrRemove);
|
|
socket.removeListener('agentRemove', onCloseOrRemove);
|
|
}
|
|
});
|
|
};
|
|
|
|
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
|
|
var self = this;
|
|
var placeholder = {};
|
|
self.sockets.push(placeholder);
|
|
|
|
var connectOptions = mergeOptions({}, self.proxyOptions, {
|
|
method: 'CONNECT',
|
|
path: options.host + ':' + options.port,
|
|
agent: false,
|
|
headers: {
|
|
host: options.host + ':' + options.port
|
|
}
|
|
});
|
|
if (options.localAddress) {
|
|
connectOptions.localAddress = options.localAddress;
|
|
}
|
|
if (connectOptions.proxyAuth) {
|
|
connectOptions.headers = connectOptions.headers || {};
|
|
connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
|
|
Buffer.from(connectOptions.proxyAuth).toString('base64');
|
|
}
|
|
|
|
debug('making CONNECT request');
|
|
var connectReq = self.request(connectOptions);
|
|
connectReq.useChunkedEncodingByDefault = false; // for v0.6
|
|
connectReq.once('response', onResponse); // for v0.6
|
|
connectReq.once('upgrade', onUpgrade); // for v0.6
|
|
connectReq.once('connect', onConnect); // for v0.7 or later
|
|
connectReq.once('error', onError);
|
|
connectReq.end();
|
|
|
|
function onResponse(res) {
|
|
// Very hacky. This is necessary to avoid http-parser leaks.
|
|
res.upgrade = true;
|
|
}
|
|
|
|
function onUpgrade(res, socket, head) {
|
|
// Hacky.
|
|
process.nextTick(function() {
|
|
onConnect(res, socket, head);
|
|
});
|
|
}
|
|
|
|
function onConnect(res, socket, head) {
|
|
connectReq.removeAllListeners();
|
|
socket.removeAllListeners();
|
|
|
|
if (res.statusCode !== 200) {
|
|
debug('tunneling socket could not be established, statusCode=%d',
|
|
res.statusCode);
|
|
socket.destroy();
|
|
var error = new Error('tunneling socket could not be established, ' +
|
|
'statusCode=' + res.statusCode);
|
|
error.code = 'ECONNRESET';
|
|
options.request.emit('error', error);
|
|
self.removeSocket(placeholder);
|
|
return;
|
|
}
|
|
if (head.length > 0) {
|
|
debug('got illegal response body from proxy');
|
|
socket.destroy();
|
|
var error = new Error('got illegal response body from proxy');
|
|
error.code = 'ECONNRESET';
|
|
options.request.emit('error', error);
|
|
self.removeSocket(placeholder);
|
|
return;
|
|
}
|
|
debug('tunneling connection has been established');
|
|
self.sockets[self.sockets.indexOf(placeholder)] = socket;
|
|
return cb(socket);
|
|
}
|
|
|
|
function onError(cause) {
|
|
connectReq.removeAllListeners();
|
|
|
|
debug('tunneling socket could not be established, cause=%s\n',
|
|
cause.message, cause.stack);
|
|
var error = new Error('tunneling socket could not be established, ' +
|
|
'cause=' + cause.message);
|
|
error.code = 'ECONNRESET';
|
|
options.request.emit('error', error);
|
|
self.removeSocket(placeholder);
|
|
}
|
|
};
|
|
|
|
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
|
|
var pos = this.sockets.indexOf(socket)
|
|
if (pos === -1) {
|
|
return;
|
|
}
|
|
this.sockets.splice(pos, 1);
|
|
|
|
var pending = this.requests.shift();
|
|
if (pending) {
|
|
// If we have pending requests and a socket gets closed a new one
|
|
// needs to be created to take over in the pool for the one that closed.
|
|
this.createSocket(pending, function(socket) {
|
|
pending.request.onSocket(socket);
|
|
});
|
|
}
|
|
};
|
|
|
|
function createSecureSocket(options, cb) {
|
|
var self = this;
|
|
TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
|
|
var hostHeader = options.request.getHeader('host');
|
|
var tlsOptions = mergeOptions({}, self.options, {
|
|
socket: socket,
|
|
servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
|
|
});
|
|
|
|
// 0 is dummy port for v0.6
|
|
var secureSocket = tls.connect(0, tlsOptions);
|
|
self.sockets[self.sockets.indexOf(socket)] = secureSocket;
|
|
cb(secureSocket);
|
|
});
|
|
}
|
|
|
|
|
|
function toOptions(host, port, localAddress) {
|
|
if (typeof host === 'string') { // since v0.10
|
|
return {
|
|
host: host,
|
|
port: port,
|
|
localAddress: localAddress
|
|
};
|
|
}
|
|
return host; // for v0.11 or later
|
|
}
|
|
|
|
function mergeOptions(target) {
|
|
for (var i = 1, len = arguments.length; i < len; ++i) {
|
|
var overrides = arguments[i];
|
|
if (typeof overrides === 'object') {
|
|
var keys = Object.keys(overrides);
|
|
for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
|
|
var k = keys[j];
|
|
if (overrides[k] !== undefined) {
|
|
target[k] = overrides[k];
|
|
}
|
|
}
|
|
}
|
|
}
|
|
return target;
|
|
}
|
|
|
|
|
|
var debug;
|
|
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
|
|
debug = function() {
|
|
var args = Array.prototype.slice.call(arguments);
|
|
if (typeof args[0] === 'string') {
|
|
args[0] = 'TUNNEL: ' + args[0];
|
|
} else {
|
|
args.unshift('TUNNEL:');
|
|
}
|
|
console.error.apply(console, args);
|
|
}
|
|
} else {
|
|
debug = function() {};
|
|
}
|
|
exports.debug = debug; // for test
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9046:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
exports.fromCallback = function (fn) {
|
|
return Object.defineProperty(function (...args) {
|
|
if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
|
|
else {
|
|
return new Promise((resolve, reject) => {
|
|
fn.call(
|
|
this,
|
|
...args,
|
|
(err, res) => (err != null) ? reject(err) : resolve(res)
|
|
)
|
|
})
|
|
}
|
|
}, 'name', { value: fn.name })
|
|
}
|
|
|
|
exports.fromPromise = function (fn) {
|
|
return Object.defineProperty(function (...args) {
|
|
const cb = args[args.length - 1]
|
|
if (typeof cb !== 'function') return fn.apply(this, args)
|
|
else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)
|
|
}, 'name', { value: fn.name })
|
|
}
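// Illustrative usage of the universalify helpers above (comment sketch only;
// assumes an fs module is in scope): one wrapper serves both calling styles.
/*
  const readFileU = exports.fromCallback(fs.readFile)
  readFileU('package.json', 'utf8', (err, data) => { ... }) // callback style
  readFileU('package.json', 'utf8').then(data => { ... })   // promise style
*/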
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2707:
|
|
/***/ ((module) => {
|
|
|
|
/**
|
|
* Convert array of 16 byte values to UUID string format of the form:
|
|
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
|
*/
|
|
var byteToHex = [];
|
|
for (var i = 0; i < 256; ++i) {
|
|
byteToHex[i] = (i + 0x100).toString(16).substr(1);
|
|
}
|
|
|
|
function bytesToUuid(buf, offset) {
|
|
var i = offset || 0;
|
|
var bth = byteToHex;
|
|
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
|
|
return ([
|
|
bth[buf[i++]], bth[buf[i++]],
|
|
bth[buf[i++]], bth[buf[i++]], '-',
|
|
bth[buf[i++]], bth[buf[i++]], '-',
|
|
bth[buf[i++]], bth[buf[i++]], '-',
|
|
bth[buf[i++]], bth[buf[i++]], '-',
|
|
bth[buf[i++]], bth[buf[i++]],
|
|
bth[buf[i++]], bth[buf[i++]],
|
|
bth[buf[i++]], bth[buf[i++]]
|
|
]).join('');
|
|
}
|
|
|
|
module.exports = bytesToUuid;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5859:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// Unique ID creation requires a high quality random # generator. In node.js
|
|
// this is pretty straight-forward - we use the crypto API.
|
|
|
|
var crypto = __nccwpck_require__(6113);
|
|
|
|
module.exports = function nodeRNG() {
|
|
return crypto.randomBytes(16);
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 824:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var rng = __nccwpck_require__(5859);
|
|
var bytesToUuid = __nccwpck_require__(2707);
|
|
|
|
function v4(options, buf, offset) {
|
|
var i = buf && offset || 0;
|
|
|
|
if (typeof(options) == 'string') {
|
|
buf = options === 'binary' ? new Array(16) : null;
|
|
options = null;
|
|
}
|
|
options = options || {};
|
|
|
|
var rnds = options.random || (options.rng || rng)();
|
|
|
|
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
|
|
rnds[6] = (rnds[6] & 0x0f) | 0x40;
|
|
rnds[8] = (rnds[8] & 0x3f) | 0x80;
|
|
|
|
// Copy bytes to buffer, if provided
|
|
if (buf) {
|
|
for (var ii = 0; ii < 16; ++ii) {
|
|
buf[i + ii] = rnds[ii];
|
|
}
|
|
}
|
|
|
|
return buf || bytesToUuid(rnds);
|
|
}
|
|
|
|
module.exports = v4;
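// Illustrative usage of v4() (comment sketch only): with no arguments it
// returns a random RFC 4122 version-4 UUID string; given a buffer it writes
// the 16 raw bytes into it instead.
/*
  v4()                       // e.g. '110ec58a-a0f2-4ac4-8393-c866d813b8d1'
  v4(null, Buffer.alloc(16)) // fills and returns the 16-byte buffer
*/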
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 399:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
}
|
|
Object.defineProperty(o, k2, desc);
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const core = __importStar(__nccwpck_require__(2186));
|
|
const tc = __importStar(__nccwpck_require__(7784));
|
|
const exec = __importStar(__nccwpck_require__(1514));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
const fs = __importStar(__nccwpck_require__(7147));
|
|
const fse = __importStar(__nccwpck_require__(5630));
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const CMDLINE_TOOLS_VERSION = '11.0';
|
|
const COMMANDLINE_TOOLS_VERSION = '10406996';
|
|
const COMMANDLINE_TOOLS_WIN_URL = `https://dl.google.com/android/repository/commandlinetools-win-${COMMANDLINE_TOOLS_VERSION}_latest.zip`;
|
|
const COMMANDLINE_TOOLS_MAC_URL = `https://dl.google.com/android/repository/commandlinetools-mac-${COMMANDLINE_TOOLS_VERSION}_latest.zip`;
|
|
const COMMANDLINE_TOOLS_LIN_URL = `https://dl.google.com/android/repository/commandlinetools-linux-${COMMANDLINE_TOOLS_VERSION}_latest.zip`;
|
|
const HOME = os.homedir();
|
|
const ANDROID_HOME_DIR = path.join(HOME, '.android');
|
|
const ANDROID_HOME_SDK_DIR = path.join(ANDROID_HOME_DIR, 'sdk');
|
|
let ANDROID_SDK_ROOT = process.env['ANDROID_SDK_ROOT'] || ANDROID_HOME_SDK_DIR;
|
|
function getSdkManagerPath(cmdToolsVersion) {
|
|
return path.join(ANDROID_SDK_ROOT, 'cmdline-tools', cmdToolsVersion, 'bin', 'sdkmanager');
|
|
}
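// Illustrative note (not part of the action logic): with the constants above,
// getSdkManagerPath(CMDLINE_TOOLS_VERSION) resolves to
//   <ANDROID_SDK_ROOT>/cmdline-tools/11.0/bin/sdkmanager
// e.g. ~/.android/sdk/cmdline-tools/11.0/bin/sdkmanager when ANDROID_SDK_ROOT
// is not set in the environment.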
|
|
function findPreinstalledSdkManager() {
|
|
const result = { isFound: false, isCorrectVersion: false, exePath: '' };
|
|
// First try to find the version defined in CMDLINE_TOOLS_VERSION
|
|
result.exePath = getSdkManagerPath(CMDLINE_TOOLS_VERSION);
|
|
result.isFound = fs.existsSync(result.exePath);
|
|
if (result.isFound) {
|
|
result.isCorrectVersion = true;
|
|
return result;
|
|
}
|
|
// cmdline-tools may also be installed under 'latest', but a 'latest' that was
// preinstalled long ago can still be outdated, so its revision is verified below.
|
|
result.exePath = getSdkManagerPath('latest');
|
|
result.isFound = fs.existsSync(result.exePath);
|
|
if (result.isFound) {
|
|
const propertiesFile = path.join(ANDROID_SDK_ROOT, 'cmdline-tools', 'latest', 'source.properties');
|
|
if (fs.existsSync(propertiesFile)) {
|
|
result.isCorrectVersion = fs
|
|
.readFileSync(propertiesFile, 'utf8')
|
|
.includes(`Pkg.Revision=${CMDLINE_TOOLS_VERSION}`);
|
|
}
|
|
return result;
|
|
}
|
|
result.exePath = '';
|
|
// Find whatever version is available in ANDROID_SDK_ROOT
|
|
const cmdlineToolsDir = path.join(ANDROID_SDK_ROOT, 'cmdline-tools');
|
|
const foundVersions = fs.existsSync(cmdlineToolsDir)
|
|
? fs.readdirSync(cmdlineToolsDir)
|
|
: [];
|
|
const foundVersionsFiltered = foundVersions.filter(obj => '.' !== obj && '..' !== obj);
|
|
// Sort in descending order, so that 2.0 comes before 1.0
|
|
const foundVersionsSorted = foundVersionsFiltered.sort((a, b) => (a > b ? -1 : 1));
|
|
for (const version of foundVersionsSorted) {
|
|
result.exePath = getSdkManagerPath(version);
|
|
result.isFound = fs.existsSync(result.exePath);
|
|
if (result.isFound) {
|
|
return result;
|
|
}
|
|
}
|
|
result.exePath = '';
|
|
return result;
|
|
}
|
|
function callSdkManager(sdkManager, arg) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const acceptBuffer = Buffer.from(Array(10).fill('y').join('\n'), 'utf8');
|
|
yield exec.exec(sdkManager, [arg], {
|
|
input: acceptBuffer
|
|
});
|
|
});
|
|
}
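// Illustrative usage of callSdkManager() (comment sketch only): the buffer of
// repeated 'y' lines is fed to sdkmanager's stdin so interactive license and
// confirmation prompts are auto-accepted.
/*
  yield callSdkManager(sdkManagerPath, 'platform-tools')
  yield callSdkManager(sdkManagerPath, 'platforms;android-34') // hypothetical package id
*/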
|
|
function installSdkManager() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
fs.mkdirSync(ANDROID_SDK_ROOT, { recursive: true });
|
|
// touch $ANDROID_SDK_ROOT/repositories.cfg
|
|
fs.closeSync(fs.openSync(path.join(ANDROID_SDK_ROOT, 'repositories.cfg'), 'w'));
|
|
const sdkManager = findPreinstalledSdkManager();
|
|
if (!sdkManager.isFound) {
|
|
let cmdlineToolsURL;
|
|
if (process.platform === 'linux') {
|
|
cmdlineToolsURL = COMMANDLINE_TOOLS_LIN_URL;
|
|
}
|
|
else if (process.platform === 'darwin') {
|
|
cmdlineToolsURL = COMMANDLINE_TOOLS_MAC_URL;
|
|
}
|
|
else if (process.platform === 'win32') {
|
|
cmdlineToolsURL = COMMANDLINE_TOOLS_WIN_URL;
|
|
}
|
|
else {
|
|
core.error(`Unsupported platform: ${process.platform}`);
|
|
return '';
|
|
}
|
|
const cmdlineToolsZip = yield tc.downloadTool(cmdlineToolsURL);
|
|
const cmdlineToolsExtractedLocation = yield tc.extractZip(cmdlineToolsZip);
|
|
// Move cmdline-tools to where it would live if it had been installed through
// sdkmanager; this allows calling sdkmanager without the --sdk_root='..' argument
|
|
const desiredLocation = path.join(ANDROID_SDK_ROOT, 'cmdline-tools', CMDLINE_TOOLS_VERSION);
|
|
// Create parent directory
|
|
fs.mkdirSync(path.dirname(desiredLocation), { recursive: true });
|
|
// Make sure we don't have a leftover target directory (happens sometimes...)
|
|
if (fs.existsSync(desiredLocation))
|
|
fse.removeSync(desiredLocation);
|
|
// @TODO: use io.mv instead of fs-extra.moveSync once following issue is resolved:
|
|
// https://github.com/actions/toolkit/issues/706
|
|
fse.moveSync(path.join(cmdlineToolsExtractedLocation, 'cmdline-tools'), desiredLocation);
|
|
fse.removeSync(cmdlineToolsExtractedLocation);
|
|
sdkManager.exePath = getSdkManagerPath(CMDLINE_TOOLS_VERSION);
|
|
sdkManager.isCorrectVersion = true;
|
|
}
|
|
if (!sdkManager.isCorrectVersion) {
|
|
yield callSdkManager(sdkManager.exePath, `cmdline-tools;${CMDLINE_TOOLS_VERSION}`);
|
|
sdkManager.exePath = getSdkManagerPath(CMDLINE_TOOLS_VERSION);
|
|
}
|
|
return sdkManager.exePath;
|
|
});
|
|
}
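// Illustrative note (not part of the action logic): installSdkManager() resolves
// to the sdkmanager path it ended up with, e.g. after a fresh download on Linux
//   <ANDROID_SDK_ROOT>/cmdline-tools/11.0/bin/sdkmanager
// unpacked from commandlinetools-linux-10406996_latest.zip into that layout.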
|
|
function run() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if ('win16' === process.env['ImageOS']) {
|
|
if (-1 !== ANDROID_SDK_ROOT.indexOf(' ')) {
|
|
// On Windows Server 2016 the Android SDK is preinstalled under Program Files,
// and the space in that path breaks the sdkmanager.bat invocation:
|
|
// C:\windows\system32\cmd.exe /D /S /C ""C:\Program Files (x86)\Android\android-sdk\cmdline-tools\3.0\bin\sdkmanager.bat" --licenses"
|
|
// Error: Could not find or load main class Files
|
|
const newSDKLocation = ANDROID_SDK_ROOT.replace(/\s/gi, '-');
|
|
core.debug(`moving ${ANDROID_SDK_ROOT} to ${newSDKLocation}`);
|
|
fs.mkdirSync(path.dirname(newSDKLocation), { recursive: true });
|
|
// intentionally using fs.renameSync,
|
|
// because it doesn't move across drives
|
|
fs.renameSync(ANDROID_SDK_ROOT, newSDKLocation);
|
|
ANDROID_SDK_ROOT = newSDKLocation;
|
|
}
|
|
}
|
|
const sdkManager = yield installSdkManager();
|
|
core.debug(`sdkmanager installed to: ${sdkManager}`);
|
|
yield callSdkManager(sdkManager, '--licenses');
|
|
yield callSdkManager(sdkManager, 'tools');
|
|
yield callSdkManager(sdkManager, 'platform-tools');
|
|
core.setOutput('ANDROID_COMMANDLINE_TOOLS_VERSION', COMMANDLINE_TOOLS_VERSION);
|
|
core.exportVariable('ANDROID_HOME', ANDROID_SDK_ROOT);
|
|
core.exportVariable('ANDROID_SDK_ROOT', ANDROID_SDK_ROOT);
|
|
core.addPath(path.dirname(sdkManager));
|
|
core.addPath(path.join(ANDROID_SDK_ROOT, 'platform-tools'));
|
|
core.debug('add matchers');
|
|
// eslint-disable-next-line no-console
|
|
console.log(`##[add-matcher]${path.join(__dirname, '..', 'matchers.json')}`);
|
|
});
|
|
}
|
|
run();
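// Illustrative workflow usage of this action (comment sketch only; the action
// reference and package id are examples): run() above exports ANDROID_HOME and
// ANDROID_SDK_ROOT, puts the cmdline-tools bin and platform-tools directories
// on PATH, and sets the ANDROID_COMMANDLINE_TOOLS_VERSION output.
/*
  # .github/workflows/build.yml (excerpt)
  - uses: android-actions/setup-android@v3   # tag is illustrative
  - run: sdkmanager "platforms;android-34"   # hypothetical extra package
*/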
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9491:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("assert");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2081:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("child_process");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2057:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("constants");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6113:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("crypto");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2361:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("events");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7147:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("fs");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3685:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("http");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5687:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("https");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1808:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("net");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2037:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("os");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1017:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("path");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2781:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("stream");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1576:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("string_decoder");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9512:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("timers");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4404:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("tls");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3837:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
module.exports = require("util");
|
|
|
|
/***/ })
|
|
|
|
/******/ });
|
|
/************************************************************************/
|
|
/******/ // The module cache
|
|
/******/ var __webpack_module_cache__ = {};
|
|
/******/
|
|
/******/ // The require function
|
|
/******/ function __nccwpck_require__(moduleId) {
|
|
/******/ // Check if module is in cache
|
|
/******/ var cachedModule = __webpack_module_cache__[moduleId];
|
|
/******/ if (cachedModule !== undefined) {
|
|
/******/ return cachedModule.exports;
|
|
/******/ }
|
|
/******/ // Create a new module (and put it into the cache)
|
|
/******/ var module = __webpack_module_cache__[moduleId] = {
|
|
/******/ // no module.id needed
|
|
/******/ // no module.loaded needed
|
|
/******/ exports: {}
|
|
/******/ };
|
|
/******/
|
|
/******/ // Execute the module function
|
|
/******/ var threw = true;
|
|
/******/ try {
|
|
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
|
|
/******/ threw = false;
|
|
/******/ } finally {
|
|
/******/ if(threw) delete __webpack_module_cache__[moduleId];
|
|
/******/ }
|
|
/******/
|
|
/******/ // Return the exports of the module
|
|
/******/ return module.exports;
|
|
/******/ }
|
|
/******/
|
|
/************************************************************************/
|
|
/******/ /* webpack/runtime/compat */
|
|
/******/
|
|
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
|
|
/******/
|
|
/************************************************************************/
|
|
/******/
|
|
/******/ // startup
|
|
/******/ // Load entry module and return exports
|
|
/******/ // This entry module is referenced by other modules so it can't be inlined
|
|
/******/ var __webpack_exports__ = __nccwpck_require__(399);
|
|
/******/ module.exports = __webpack_exports__;
|
|
/******/
|
|
/******/ })()
|
|
;