From 88bdf1618143bd64c64d9c22621802830a470025 Mon Sep 17 00:00:00 2001 From: shaowenchen Date: Tue, 19 Jan 2021 19:34:15 +0800 Subject: [PATCH] upadte dist --- dist/index.js | 12351 +++++++++++++++++++++++++++++++++++ dist/index.js.map | 1 + dist/licenses.txt | 158 + dist/sourcemap-register.js | 1 + 4 files changed, 12511 insertions(+) create mode 100644 dist/index.js create mode 100644 dist/index.js.map create mode 100644 dist/licenses.txt create mode 100644 dist/sourcemap-register.js diff --git a/dist/index.js b/dist/index.js new file mode 100644 index 0000000..fa55285 --- /dev/null +++ b/dist/index.js @@ -0,0 +1,12351 @@ +require('./sourcemap-register.js');module.exports = +/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 372: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.frp = void 0; +const util = __importStar(__nccwpck_require__(669)); +const exec = __importStar(__nccwpck_require__(514)); +const fs_1 = __nccwpck_require__(747); +const toolCache = __importStar(__nccwpck_require__(784)); +const util_1 = __nccwpck_require__(669); +const ini_1 = __importDefault(__nccwpck_require__(885)); +const utils_1 = __nccwpck_require__(918); +const writeFileAsync = util_1.promisify(fs_1.writeFile); +const name = 'frpc'; +const defaultVersion = '0.34.3'; +const fileSufix = utils_1.getArchivedExtension(); +const downloadUrlScheme = 'https://github.com/fatedier/frp/releases/download/v%s/%s%s'; +function getFullName(version) { + return util.format('frp_%s_%s_amd64', version, utils_1.getOsType()); +} +function getExecPath(version) { + return __awaiter(this, void 0, void 0, function* () { + const downloadUrl = util.format(downloadUrlScheme, version, getFullName(version), fileSufix); + const localPath = yield utils_1.downloadCache(downloadUrl, name, version, getFullName(version) + fileSufix); + const execPath = yield toolCache.extractTar(localPath); + return execPath; + }); +} +function writeInit(path, frp_server_addr, frp_server_port, frp_token, ssh_port) { + return __awaiter(this, void 0, void 0, function* () { + const config = Object(); + config['common'] = { + server_addr: frp_server_addr, + server_port: frp_server_port, + token: frp_token + }; + const ssh_port_special = util.format('ssh-%s', ssh_port); + config[ssh_port_special] = { + type: 'tcp', + local_ip: '127.0.0.1', + local_port: 8000, + remote_port: ssh_port + }; + yield writeFileAsync(path, ini_1.default.stringify(config)); + return path; + }); +} +function frp(frp_server_addr, frp_server_port, frp_token, ssh_port) { + return __awaiter(this, void 0, void 0, function* () { + const version = defaultVersion; + const execPath = yield getExecPath(version); + const initFile = util.format('%s/%s/my.ini', execPath, getFullName(version)); + yield writeInit(initFile, frp_server_addr, frp_server_port, frp_token, ssh_port); + const cmdList = [ + util.format('%s/%s/%s -c %s', execPath, getFullName(version), name, initFile) + ]; + return new Promise(resolve => { + ; + (function () { + return __awaiter(this, void 0, void 0, function* () { + for (const item of cmdList) { + yield exec.exec(item); + } + }); + })(); + setTimeout(() => resolve('exec done!'), 1000); + }); + }); +} +exports.frp = frp; + + +/***/ }), + +/***/ 109: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(186)); +const frpc_1 = __nccwpck_require__(372); +const ngrok_1 = __nccwpck_require__(125); +const sshd_1 = __nccwpck_require__(750); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const frp_server_addr = core.getInput('frp_server_addr'); + const frp_server_port = core.getInput('frp_server_port'); + const frp_token = core.getInput('frp_token'); + const ssh_port = core.getInput('ssh_port'); + const ngrok_token = core.getInput('ngrok_token'); + yield sshd_1.sshd(); + if (frp_token !== '') { + yield frpc_1.frp(frp_server_addr, frp_server_port, frp_token, ssh_port); + } + if (ngrok_token !== '') { + yield ngrok_1.ngrok(ngrok_token); + } + } + catch (error) { + core.setFailed(error.message); + } + }); +} +run(); + + +/***/ }), + +/***/ 125: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ngrok = void 0; +const util = __importStar(__nccwpck_require__(669)); +const exec = __importStar(__nccwpck_require__(514)); +const core = __importStar(__nccwpck_require__(186)); +const toolCache = __importStar(__nccwpck_require__(784)); +const utils_1 = __nccwpck_require__(918); +const yaml_1 = __importDefault(__nccwpck_require__(552)); +const fs_1 = __nccwpck_require__(747); +const util_1 = __nccwpck_require__(669); +const writeFileAsync = util_1.promisify(fs_1.writeFile); +const name = 'ngrok'; +const defaultVersion = '4VmDzA7iaHb'; +const fileSufix = '.zip'; +const downloadUrlScheme = 'https://bin.equinox.io/c/%s/%s%s'; +function getFullName() { + return util.format('ngrok-stable-%s-amd64', utils_1.getOsType()); +} +function writeTunnel(path, token) { + return __awaiter(this, void 0, void 0, function* () { + const config = Object(); + config['authtoken'] = token; + config['tunnels'] = { + 'tcp-8000': { + addr: '8000', + proto: 'tcp' + } + }; + const addr_1 = core.getInput('ngrok_addr_1'); + const proto_1 = core.getInput('ngrok_proto_1'); + const addr_2 = core.getInput('ngrok_addr_2'); + const proto_2 = core.getInput('ngrok_proto_2'); + const addr_3 = core.getInput('ngrok_addr_3'); + const proto_3 = core.getInput('ngrok_proto_3'); + if (addr_1 !== '' && proto_1 !== '') { + const key_1 = util.format('%s-%s', proto_1, addr_1); + config['tunnels'][key_1] = { + addr: addr_1, + proto: proto_1 + }; + } + if (addr_2 !== '' && proto_2 !== '') { + const key_2 = util.format('%s-%s', proto_2, addr_2); + config['tunnels'][key_2] = { + addr: addr_2, + proto: proto_2 + }; + } + if (addr_3 !== '' && proto_3 !== '') { + const key_3 = util.format('%s-%s', proto_3, addr_3); + config['tunnels'][key_3] = { + addr: addr_3, + proto: proto_3 + }; + } + yield writeFileAsync(path, yaml_1.default.stringify(config)); + return path; + }); +} +function getExecPath(version) { + return __awaiter(this, void 0, void 0, function* () { + const downloadUrl = util.format(downloadUrlScheme, version, getFullName(), fileSufix); + const localPath = yield utils_1.downloadCache(downloadUrl, name, version, getFullName() + fileSufix); + const execPath = yield toolCache.extractZip(localPath); + return execPath; + }); +} +function ngrok(NGROK_TOKEN) { + return __awaiter(this, void 0, void 0, function* () { + const version = defaultVersion; + const execPath = yield getExecPath(version); + const cfgFile = util.format('%s/ngrok.cfg', execPath); + writeTunnel(cfgFile, NGROK_TOKEN); + const cmdList = [ + util.format('chmod +x %s/ngrok', execPath), + util.format('%s/ngrok start --all --config %s --log "stdout"', execPath, cfgFile) + ]; + return new Promise(resolve => { + ; + (function () { + return __awaiter(this, void 0, void 0, function* () { + for (const item of cmdList) { + yield exec.exec(item); + } + }); + })(); + setTimeout(() => resolve('exec done!'), 1000); + }); + }); +} +exports.ngrok = ngrok; + + +/***/ }), + +/***/ 750: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sshd = void 0; +const util = __importStar(__nccwpck_require__(669)); +const exec = __importStar(__nccwpck_require__(514)); +const toolCache = __importStar(__nccwpck_require__(784)); +const utils_1 = __nccwpck_require__(918); +const name = 'sshd'; +const defaultVersion = '2.5.1'; +const fileSufix = utils_1.getArchivedExtension(); +const downloadUrlScheme = 'https://archive.apache.org/dist/mina/sshd/%s/%s%s'; +function getFullName(version) { + return util.format('apache-sshd-%s', version); +} +function getExecPath(version) { + return __awaiter(this, void 0, void 0, function* () { + const downloadUrl = util.format(downloadUrlScheme, version, getFullName(version), fileSufix); + const localPath = yield utils_1.downloadCache(downloadUrl, name, version, getFullName(version) + fileSufix); + const execPath = yield toolCache.extractTar(localPath); + return execPath; + }); +} +function sshd() { + return __awaiter(this, void 0, void 0, function* () { + const version = defaultVersion; + const execPath = yield getExecPath(version); + const cmdList = [ + util.format('%s/%s/bin/%s', execPath, getFullName(version), name + utils_1.getBashExtension()) + ]; + return new Promise(resolve => { + ; + (function () { + return __awaiter(this, void 0, void 0, function* () { + for (const item of cmdList) { + yield exec.exec(item); + } + }); + })(); + setTimeout(() => resolve('exec done!'), 1000); + }); + }); +} +exports.sshd = sshd; + + +/***/ }), + +/***/ 918: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.execCmdList = exports.downloadCache = exports.getOsType = exports.getArchivedExtension = exports.getBashExtension = exports.getExecutableExtension = void 0; +const os = __importStar(__nccwpck_require__(87)); +const toolCache = __importStar(__nccwpck_require__(784)); +const exec = __importStar(__nccwpck_require__(514)); +function getExecutableExtension() { + if (os.type().match(/^Win/)) { + return '.exe'; + } + return ''; +} +exports.getExecutableExtension = getExecutableExtension; +function getBashExtension() { + if (os.type().match(/^Win/)) { + return '.bat'; + } + return '.sh'; +} +exports.getBashExtension = getBashExtension; +function getArchivedExtension() { + if (os.type().match(/^Win/)) { + return '.zip'; + } + return '.tar.gz'; +} +exports.getArchivedExtension = getArchivedExtension; +function getOsType() { + if (os.type().match(/^Win/)) { + // Windows_NT + return 'windows'; + } + return os.type().toLocaleLowerCase(); +} +exports.getOsType = getOsType; +function downloadCache(downloadUrl, name, version, fullName) { + return __awaiter(this, void 0, void 0, function* () { + let cachePath = toolCache.find(name, version); + if (!cachePath) { + try { + cachePath = yield toolCache.downloadTool(downloadUrl); + } + catch (exception) { + throw new Error('DownloadFailed'); + } + yield toolCache.cacheFile(cachePath, fullName, name, version); + } + return cachePath; + }); +} +exports.downloadCache = downloadCache; +function execCmdList(cmdList) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => { + ; + (function () { + return __awaiter(this, void 0, void 0, function* () { + for (const item of cmdList) { + yield exec.exec(item); + } + }); + })(); + setTimeout(() => resolve('exec done!'), 1000); + }); + }); +} +exports.execCmdList = execCmdList; + + +/***/ }), + +/***/ 351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const os = __importStar(__nccwpck_require__(87)); +const utils_1 = __nccwpck_require__(278); 
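+// Command serialization below escapes message data ('%' -> '%25', '\r' -> '%0D',
+// '\n' -> '%0A'); property values additionally escape ':' -> '%3A' and ',' -> '%2C'
+// so the '::' and ',' delimiters in the rendered command stay unambiguous.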
+/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 186: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const command_1 = __nccwpck_require__(351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(278); +const os = __importStar(__nccwpck_require__(87)); +const path = __importStar(__nccwpck_require__(622)); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; + file_command_1.issueCommand('ENV', commandValue); + } + else { + command_1.issueCommand('set-env', { name }, convertedVal); + } +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + */ +function error(message) { + command_1.issue('error', message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message. Errors will be converted to string via toString() + */ +function warning(message) { + command_1.issue('warning', message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 717: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(747)); +const os = __importStar(__nccwpck_require__(87)); +const utils_1 = __nccwpck_require__(278); +function issueCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueCommand = issueCommand; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 278: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 514: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tr = __importStar(__nccwpck_require__(159)); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +//# sourceMappingURL=exec.js.map + +/***/ }), + +/***/ 159: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const os = __importStar(__nccwpck_require__(87)); +const events = __importStar(__nccwpck_require__(614)); +const child = __importStar(__nccwpck_require__(129)); +const path = __importStar(__nccwpck_require__(622)); +const io = __importStar(__nccwpck_require__(436)); +const ioUtil = __importStar(__nccwpck_require__(962)); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. 
+ */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + strBuffer = s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. 
+ // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. 
+ let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + const stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + const errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + }); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 925: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const http = __nccwpck_require__(605); +const https = __nccwpck_require__(211); +const pm = __nccwpck_require__(443); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, 
depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, 
additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + let parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. 
+ return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. 
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __nccwpck_require__(294); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), + +/***/ 443: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; + } + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + if (proxyVar) { + proxyUrl = new URL(proxyVar); + } + return proxyUrl; +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (let 
upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; + + +/***/ }), + +/***/ 962: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const assert_1 = __nccwpck_require__(357); +const fs = __nccwpck_require__(747); +const path = __nccwpck_require__(622); +_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +exports.IS_WINDOWS = process.platform === 'win32'; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Recursively create a directory at `fsPath`. + * + * This implementation is optimistic, meaning it attempts to create the full + * path first, and backs up the path stack from there. 
+ * + * @param fsPath The path to create + * @param maxDepth The maximum recursion depth + * @param depth The current recursion depth + */ +function mkdirP(fsPath, maxDepth = 1000, depth = 1) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + fsPath = path.resolve(fsPath); + if (depth >= maxDepth) + return exports.mkdir(fsPath); + try { + yield exports.mkdir(fsPath); + return; + } + catch (err) { + switch (err.code) { + case 'ENOENT': { + yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); + yield exports.mkdir(fsPath); + return; + } + default: { + let stats; + try { + stats = yield exports.stat(fsPath); + } + catch (err2) { + throw err; + } + if (!stats.isDirectory()) + throw err; + } + } + } + }); +} +exports.mkdirP = mkdirP; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && 
stats.uid === process.getuid())); +} +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 436: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const childProcess = __nccwpck_require__(129); +const path = __nccwpck_require__(622); +const util_1 = __nccwpck_require__(669); +const ioUtil = __nccwpck_require__(962); +const exec = util_1.promisify(childProcess.exec); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. + try { + if (yield ioUtil.isDirectory(inputPath, true)) { + yield exec(`rd /s /q "${inputPath}"`); + } + else { + yield exec(`del /f /a "${inputPath}"`); + } + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + // Shelling out fails to remove a symlink folder with missing source, this unlink catches that + try { + yield ioUtil.unlink(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + } + else { + let isDir = false; + try { + isDir = yield ioUtil.isDirectory(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + return; + } + if (isDir) { + yield exec(`rm -rf "${inputPath}"`); + } + else { + yield ioUtil.unlink(inputPath); + } + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + yield ioUtil.mkdirP(fsPath); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`); + } + } + } + try { + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { + for (const extension of process.env.PATHEXT.split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return filePath; + } + return ''; + } + // if any path separators, return empty + if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { + return ''; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // return the first match + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + return filePath; + } + } + return ''; + } + catch (err) { + throw new Error(`which failed with message ${err.message}`); + } + }); +} +exports.which = which; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + return { force, recursive }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 473: +/***/ (function(module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const semver = __importStar(__nccwpck_require__(562)); +const core_1 = __nccwpck_require__(186); +// needs to be require for core node modules to be mocked +/* eslint @typescript-eslint/no-require-imports: 0 */ +const os = __nccwpck_require__(87); +const cp = __nccwpck_require__(129); +const fs = __nccwpck_require__(747); +function _findMatch(versionSpec, stable, candidates, archFilter) { + return __awaiter(this, void 0, void 0, function* () { + const platFilter = os.platform(); + let result; + let match; + let file; + for (const candidate of candidates) { + const version = candidate.version; + core_1.debug(`check ${version} satisfies ${versionSpec}`); + if (semver.satisfies(version, versionSpec) && + (!stable || candidate.stable === stable)) { + file = candidate.files.find(item => { + core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); + let chk = item.arch === archFilter && item.platform === platFilter; + if (chk && item.platform_version) { + const osVersion = module.exports._getOsVersion(); + if (osVersion === item.platform_version) { + chk = true; + } + else { + chk = semver.satisfies(osVersion, item.platform_version); + } + } + return chk; + }); + if (file) { + core_1.debug(`matched ${candidate.version}`); + match = candidate; + break; + } + } + } + if (match && file) { + // clone since we're mutating the file list to be only the file that matches + result = Object.assign({}, match); + result.files = [file]; + } + return result; + }); +} +exports._findMatch = _findMatch; +function _getOsVersion() { + // TODO: add windows and other linux, arm variants + // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python) + const plat = os.platform(); + let version = ''; + if (plat === 'darwin') { + version = cp.execSync('sw_vers -productVersion').toString(); + } + else if (plat === 'linux') { + // lsb_release process not in some containers, readfile + // Run cat /etc/lsb-release + // DISTRIB_ID=Ubuntu + // DISTRIB_RELEASE=18.04 + // DISTRIB_CODENAME=bionic + // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS" + const lsbContents = module.exports._readLinuxVersionFile(); + if (lsbContents) { + const lines = lsbContents.split('\n'); + for (const line of lines) { + const parts = 
line.split('='); + if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') { + version = parts[1].trim(); + break; + } + } + } + } + return version; +} +exports._getOsVersion = _getOsVersion; +function _readLinuxVersionFile() { + const lsbFile = '/etc/lsb-release'; + let contents = ''; + if (fs.existsSync(lsbFile)) { + contents = fs.readFileSync(lsbFile).toString(); + } + return contents; +} +exports._readLinuxVersionFile = _readLinuxVersionFile; +//# sourceMappingURL=manifest.js.map + +/***/ }), + +/***/ 279: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(186)); +/** + * Internal class for retries + */ +class RetryHelper { + constructor(maxAttempts, minSeconds, maxSeconds) { + if (maxAttempts < 1) { + throw new Error('max attempts should be greater than or equal to 1'); + } + this.maxAttempts = maxAttempts; + this.minSeconds = Math.floor(minSeconds); + this.maxSeconds = Math.floor(maxSeconds); + if (this.minSeconds > this.maxSeconds) { + throw new Error('min seconds should be less than or equal to max seconds'); + } + } + execute(action, isRetryable) { + return __awaiter(this, void 0, void 0, function* () { + let attempt = 1; + while (attempt < this.maxAttempts) { + // Try + try { + return yield action(); + } + catch (err) { + if (isRetryable && !isRetryable(err)) { + throw err; + } + core.info(err.message); + } + // Sleep + const seconds = this.getSleepAmount(); + core.info(`Waiting ${seconds} seconds before trying again`); + yield this.sleep(seconds); + attempt++; + } + // Last attempt + return yield action(); + }); + } + getSleepAmount() { + return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + + this.minSeconds); + } + sleep(seconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, seconds * 1000)); + }); + } +} +exports.RetryHelper = RetryHelper; +//# sourceMappingURL=retry-helper.js.map + +/***/ }), + +/***/ 784: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(186)); +const io = __importStar(__nccwpck_require__(436)); +const fs = __importStar(__nccwpck_require__(747)); +const mm = __importStar(__nccwpck_require__(473)); +const os = __importStar(__nccwpck_require__(87)); +const path = __importStar(__nccwpck_require__(622)); +const httpm = __importStar(__nccwpck_require__(925)); +const semver = __importStar(__nccwpck_require__(562)); +const stream = __importStar(__nccwpck_require__(413)); +const util = __importStar(__nccwpck_require__(669)); +const v4_1 = __importDefault(__nccwpck_require__(468)); +const exec_1 = __nccwpck_require__(514); +const assert_1 = __nccwpck_require__(357); +const retry_helper_1 = __nccwpck_require__(279); +class HTTPError extends Error { + constructor(httpStatusCode) { + super(`Unexpected HTTP response: ${httpStatusCode}`); + this.httpStatusCode = httpStatusCode; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.HTTPError = HTTPError; +const IS_WINDOWS = process.platform === 'win32'; +const IS_MAC = process.platform === 'darwin'; +const userAgent = 'actions/tool-cache'; +/** + * Download a tool from an url and stream it into a file + * + * @param url url of tool to download + * @param dest path to download tool + * @param auth authorization header + * @returns path to downloaded tool + */ +function downloadTool(url, dest, auth) { + return __awaiter(this, void 0, void 0, function* () { + dest = dest || path.join(_getTempDirectory(), v4_1.default()); + yield io.mkdirP(path.dirname(dest)); + core.debug(`Downloading ${url}`); + core.debug(`Destination ${dest}`); + const maxAttempts = 3; + const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10); + const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20); + const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); + return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () { + return yield downloadToolAttempt(url, dest || '', auth); + }), (err) => { + if (err instanceof HTTPError && err.httpStatusCode) { + // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests + if (err.httpStatusCode < 500 && + err.httpStatusCode !== 408 && + err.httpStatusCode !== 429) { + return false; + } + } + // Otherwise retry + return true; + }); + }); +} +exports.downloadTool = downloadTool; +function downloadToolAttempt(url, dest, auth) { + return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(dest)) { + throw new 
Error(`Destination file path ${dest} already exists`); + } + // Get the response headers + const http = new httpm.HttpClient(userAgent, [], { + allowRetries: false + }); + let headers; + if (auth) { + core.debug('set auth'); + headers = { + authorization: auth + }; + } + const response = yield http.get(url, headers); + if (response.message.statusCode !== 200) { + const err = new HTTPError(response.message.statusCode); + core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + throw err; + } + // Download the response body + const pipeline = util.promisify(stream.pipeline); + const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message); + const readStream = responseMessageFactory(); + let succeeded = false; + try { + yield pipeline(readStream, fs.createWriteStream(dest)); + core.debug('download complete'); + succeeded = true; + return dest; + } + finally { + // Error, delete dest before retry + if (!succeeded) { + core.debug('download failed'); + try { + yield io.rmRF(dest); + } + catch (err) { + core.debug(`Failed to delete '${dest}'. ${err.message}`); + } + } + } + }); +} +/** + * Extract a .7z file + * + * @param file path to the .7z file + * @param dest destination directory. Optional. + * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this + * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will + * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is + * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line + * interface, it is smaller than the full command line interface, and it does support long paths. At the + * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website. + * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path + * to 7zr.exe can be pass to this function. + * @returns path to the destination directory + */ +function extract7z(file, dest, _7zPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + const originalCwd = process.cwd(); + process.chdir(dest); + if (_7zPath) { + try { + const logLevel = core.isDebug() ? 
'-bb1' : '-bb0'; + const args = [ + 'x', + logLevel, + '-bd', + '-sccUTF-8', + file + ]; + const options = { + silent: true + }; + yield exec_1.exec(`"${_7zPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } + } + else { + const escapedScript = path + .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1') + .replace(/'/g, "''") + .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + command + ]; + const options = { + silent: true + }; + try { + const powershellPath = yield io.which('powershell', true); + yield exec_1.exec(`"${powershellPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } + } + return dest; + }); +} +exports.extract7z = extract7z; +/** + * Extract a compressed tar archive + * + * @param file path to the tar + * @param dest destination directory. Optional. + * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional. + * @returns path to the destination directory + */ +function extractTar(file, dest, flags = 'xz') { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + // Create dest + dest = yield _createExtractFolder(dest); + // Determine whether GNU tar + core.debug('Checking tar --version'); + let versionOutput = ''; + yield exec_1.exec('tar --version', [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR'); + // Initialize args + let args; + if (flags instanceof Array) { + args = flags; + } + else { + args = [flags]; + } + if (core.isDebug() && !flags.includes('v')) { + args.push('-v'); + } + let destArg = dest; + let fileArg = file; + if (IS_WINDOWS && isGnuTar) { + args.push('--force-local'); + destArg = dest.replace(/\\/g, '/'); + // Technically only the dest needs to have `/` but for aesthetic consistency + // convert slashes in the file arg too. + fileArg = file.replace(/\\/g, '/'); + } + if (isGnuTar) { + // Suppress warnings when using GNU tar to extract archives created by BSD tar + args.push('--warning=no-unknown-keyword'); + } + args.push('-C', destArg, '-f', fileArg); + yield exec_1.exec(`tar`, args); + return dest; + }); +} +exports.extractTar = extractTar; +/** + * Extract a xar compatible archive + * + * @param file path to the archive + * @param dest destination directory. Optional. + * @param flags flags for the xar. Optional. 
+ * @returns path to the destination directory + */ +function extractXar(file, dest, flags = []) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_MAC, 'extractXar() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + let args; + if (flags instanceof Array) { + args = flags; + } + else { + args = [flags]; + } + args.push('-x', '-C', dest, '-f', file); + if (core.isDebug()) { + args.push('-v'); + } + const xarPath = yield io.which('xar', true); + yield exec_1.exec(`"${xarPath}"`, _unique(args)); + return dest; + }); +} +exports.extractXar = extractXar; +/** + * Extract a zip + * + * @param file path to the zip + * @param dest destination directory. Optional. + * @returns path to the destination directory + */ +function extractZip(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + if (IS_WINDOWS) { + yield extractZipWin(file, dest); + } + else { + yield extractZipNix(file, dest); + } + return dest; + }); +} +exports.extractZip = extractZip; +function extractZipWin(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + // build the powershell command + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`; + // run powershell + const powershellPath = yield io.which('powershell', true); + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + command + ]; + yield exec_1.exec(`"${powershellPath}"`, args); + }); +} +function extractZipNix(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + const unzipPath = yield io.which('unzip', true); + const args = [file]; + if (!core.isDebug()) { + args.unshift('-q'); + } + yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); + }); +} +/** + * Caches a directory and installs it into the tool cacheDir + * + * @param sourceDir the directory to cache into tools + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture + */ +function cacheDir(sourceDir, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source dir: ${sourceDir}`); + if (!fs.statSync(sourceDir).isDirectory()) { + throw new Error('sourceDir is not a directory'); + } + // Create the tool dir + const destPath = yield _createToolPath(tool, version, arch); + // copy each child item. do not move. move can fail on Windows + // due to anti-virus software having an open handle on a file. 
+ for (const itemName of fs.readdirSync(sourceDir)) { + const s = path.join(sourceDir, itemName); + yield io.cp(s, destPath, { recursive: true }); + } + // write .complete + _completeToolPath(tool, version, arch); + return destPath; + }); +} +exports.cacheDir = cacheDir; +/** + * Caches a downloaded file (GUID) and installs it + * into the tool cache with a given targetName + * + * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid. + * @param targetFile the name of the file name in the tools directory + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture + */ +function cacheFile(sourceFile, targetFile, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source file: ${sourceFile}`); + if (!fs.statSync(sourceFile).isFile()) { + throw new Error('sourceFile is not a file'); + } + // create the tool dir + const destFolder = yield _createToolPath(tool, version, arch); + // copy instead of move. move can fail on Windows due to + // anti-virus software having an open handle on a file. + const destPath = path.join(destFolder, targetFile); + core.debug(`destination file ${destPath}`); + yield io.cp(sourceFile, destPath); + // write .complete + _completeToolPath(tool, version, arch); + return destFolder; + }); +} +exports.cacheFile = cacheFile; +/** + * Finds the path to a tool version in the local installed tool cache + * + * @param toolName name of the tool + * @param versionSpec version of the tool + * @param arch optional arch. defaults to arch of computer + */ +function find(toolName, versionSpec, arch) { + if (!toolName) { + throw new Error('toolName parameter is required'); + } + if (!versionSpec) { + throw new Error('versionSpec parameter is required'); + } + arch = arch || os.arch(); + // attempt to resolve an explicit version + if (!_isExplicitVersion(versionSpec)) { + const localVersions = findAllVersions(toolName, arch); + const match = _evaluateVersions(localVersions, versionSpec); + versionSpec = match; + } + // check for the explicit version in the cache + let toolPath = ''; + if (versionSpec) { + versionSpec = semver.clean(versionSpec) || ''; + const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch); + core.debug(`checking cache: ${cachePath}`); + if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { + core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + toolPath = cachePath; + } + else { + core.debug('not found'); + } + } + return toolPath; +} +exports.find = find; +/** + * Finds the paths to all versions of a tool that are installed in the local tool cache + * + * @param toolName name of the tool + * @param arch optional arch. 
defaults to arch of computer + */ +function findAllVersions(toolName, arch) { + const versions = []; + arch = arch || os.arch(); + const toolPath = path.join(_getCacheDirectory(), toolName); + if (fs.existsSync(toolPath)) { + const children = fs.readdirSync(toolPath); + for (const child of children) { + if (_isExplicitVersion(child)) { + const fullPath = path.join(toolPath, child, arch || ''); + if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { + versions.push(child); + } + } + } + } + return versions; +} +exports.findAllVersions = findAllVersions; +function getManifestFromRepo(owner, repo, auth, branch = 'master') { + return __awaiter(this, void 0, void 0, function* () { + let releases = []; + const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; + const http = new httpm.HttpClient('tool-cache'); + const headers = {}; + if (auth) { + core.debug('set auth'); + headers.authorization = auth; + } + const response = yield http.getJson(treeUrl, headers); + if (!response.result) { + return releases; + } + let manifestUrl = ''; + for (const item of response.result.tree) { + if (item.path === 'versions-manifest.json') { + manifestUrl = item.url; + break; + } + } + headers['accept'] = 'application/vnd.github.VERSION.raw'; + let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); + if (versionsRaw) { + // shouldn't be needed but protects against invalid json saved with BOM + versionsRaw = versionsRaw.replace(/^\uFEFF/, ''); + try { + releases = JSON.parse(versionsRaw); + } + catch (_a) { + core.debug('Invalid json'); + } + } + return releases; + }); +} +exports.getManifestFromRepo = getManifestFromRepo; +function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + // wrap the internal impl + const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); + return match; + }); +} +exports.findFromManifest = findFromManifest; +function _createExtractFolder(dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!dest) { + // create a temp dir + dest = path.join(_getTempDirectory(), v4_1.default()); + } + yield io.mkdirP(dest); + return dest; + }); +} +function _createToolPath(tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + core.debug(`destination ${folderPath}`); + const markerPath = `${folderPath}.complete`; + yield io.rmRF(folderPath); + yield io.rmRF(markerPath); + yield io.mkdirP(folderPath); + return folderPath; + }); +} +function _completeToolPath(tool, version, arch) { + const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + const markerPath = `${folderPath}.complete`; + fs.writeFileSync(markerPath, ''); + core.debug('finished caching tool'); +} +function _isExplicitVersion(versionSpec) { + const c = semver.clean(versionSpec) || ''; + core.debug(`isExplicit: ${c}`); + const valid = semver.valid(c) != null; + core.debug(`explicit? 
${valid}`); + return valid; +} +function _evaluateVersions(versions, versionSpec) { + let version = ''; + core.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; +} +/** + * Gets RUNNER_TOOL_CACHE + */ +function _getCacheDirectory() { + const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || ''; + assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined'); + return cacheDirectory; +} +/** + * Gets RUNNER_TEMP + */ +function _getTempDirectory() { + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + return tempDirectory; +} +/** + * Gets a global variable + */ +function _getGlobal(key, defaultValue) { + /* eslint-disable @typescript-eslint/no-explicit-any */ + const value = global[key]; + /* eslint-enable @typescript-eslint/no-explicit-any */ + return value !== undefined ? value : defaultValue; +} +/** + * Returns an array of unique values. + * @param values Values to make unique. + */ +function _unique(values) { + return Array.from(new Set(values)); +} +//# sourceMappingURL=tool-cache.js.map + +/***/ }), + +/***/ 562: +/***/ ((module, exports) => { + +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' 
+ + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+ + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[t.LOOSE] : re[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === 
undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + 
+Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[t.COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[t.STAR], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) +} + + +/***/ }), + +/***/ 701: +/***/ ((module) => { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 269: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __nccwpck_require__(417); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 468: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(269); +var bytesToUuid = __nccwpck_require__(701); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 885: +/***/ ((module) => { + +const { hasOwnProperty } = Object.prototype + +const eol = typeof process !== 'undefined' && + process.platform === 'win32' ? '\r\n' : '\n' + +const encode = (obj, opt) => { + const children = [] + let out = '' + + if (typeof opt === 'string') { + opt = { + section: opt, + whitespace: false, + } + } else { + opt = opt || Object.create(null) + opt.whitespace = opt.whitespace === true + } + + const separator = opt.whitespace ? ' = ' : '=' + + for (const k of Object.keys(obj)) { + const val = obj[k] + if (val && Array.isArray(val)) { + for (const item of val) + out += safe(k + '[]') + separator + safe(item) + '\n' + } else if (val && typeof val === 'object') + children.push(k) + else + out += safe(k) + separator + safe(val) + eol + } + + if (opt.section && out.length) + out = '[' + safe(opt.section) + ']' + eol + out + + for (const k of children) { + const nk = dotSplit(k).join('\\.') + const section = (opt.section ? opt.section + '.' 
: '') + nk + const { whitespace } = opt + const child = encode(obj[k], { + section, + whitespace, + }) + if (out.length && child.length) + out += eol + + out += child + } + + return out +} + +const dotSplit = str => + str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002') + .replace(/\\\./g, '\u0001') + .split(/\./) + .map(part => + part.replace(/\1/g, '\\.') + .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001')) + +const decode = str => { + const out = Object.create(null) + let p = out + let section = null + // section |key = value + const re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i + const lines = str.split(/[\r\n]+/g) + + for (const line of lines) { + if (!line || line.match(/^\s*[;#]/)) + continue + const match = line.match(re) + if (!match) + continue + if (match[1] !== undefined) { + section = unsafe(match[1]) + if (section === '__proto__') { + // not allowed + // keep parsing the section, but don't attach it. + p = Object.create(null) + continue + } + p = out[section] = out[section] || Object.create(null) + continue + } + const keyRaw = unsafe(match[2]) + const isArray = keyRaw.length > 2 && keyRaw.slice(-2) === '[]' + const key = isArray ? keyRaw.slice(0, -2) : keyRaw + if (key === '__proto__') + continue + const valueRaw = match[3] ? unsafe(match[4]) : true + const value = valueRaw === 'true' || + valueRaw === 'false' || + valueRaw === 'null' ? JSON.parse(valueRaw) + : valueRaw + + // Convert keys with '[]' suffix to an array + if (isArray) { + if (!hasOwnProperty.call(p, key)) + p[key] = [] + else if (!Array.isArray(p[key])) + p[key] = [p[key]] + } + + // safeguard against resetting a previously defined + // array by accidentally forgetting the brackets + if (Array.isArray(p[key])) + p[key].push(value) + else + p[key] = value + } + + // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} + // use a filter to return the keys that have to be deleted. + const remove = [] + for (const k of Object.keys(out)) { + if (!hasOwnProperty.call(out, k) || + typeof out[k] !== 'object' || + Array.isArray(out[k])) + continue + + // see if the parent section is also an object. + // if so, add it to that, and mark this one for deletion + const parts = dotSplit(k) + let p = out + const l = parts.pop() + const nl = l.replace(/\\\./g, '.') + for (const part of parts) { + if (part === '__proto__') + continue + if (!hasOwnProperty.call(p, part) || typeof p[part] !== 'object') + p[part] = Object.create(null) + p = p[part] + } + if (p === out && nl === l) + continue + + p[nl] = out[k] + remove.push(k) + } + for (const del of remove) + delete out[del] + + return out +} + +const isQuoted = val => + (val.charAt(0) === '"' && val.slice(-1) === '"') || + (val.charAt(0) === "'" && val.slice(-1) === "'") + +const safe = val => + (typeof val !== 'string' || + val.match(/[=\r\n]/) || + val.match(/^\[/) || + (val.length > 1 && + isQuoted(val)) || + val !== val.trim()) + ? 
JSON.stringify(val) + : val.replace(/;/g, '\\;').replace(/#/g, '\\#') + +const unsafe = (val, doUnesc) => { + val = (val || '').trim() + if (isQuoted(val)) { + // remove the single quotes before calling JSON.parse + if (val.charAt(0) === "'") + val = val.substr(1, val.length - 2) + + try { + val = JSON.parse(val) + } catch (_) {} + } else { + // walk the val to find the first not-escaped ; character + let esc = false + let unesc = '' + for (let i = 0, l = val.length; i < l; i++) { + const c = val.charAt(i) + if (esc) { + if ('\\;#'.indexOf(c) !== -1) + unesc += c + else + unesc += '\\' + c + + esc = false + } else if (';#'.indexOf(c) !== -1) + break + else if (c === '\\') + esc = true + else + unesc += c + } + if (esc) + unesc += '\\' + + return unesc.trim() + } + return val +} + +module.exports = { + parse: decode, + decode, + stringify: encode, + encode, + safe, + unsafe, +} + + +/***/ }), + +/***/ 294: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(219); + + +/***/ }), + +/***/ 219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var net = __nccwpck_require__(631); +var tls = __nccwpck_require__(16); +var http = __nccwpck_require__(605); +var https = __nccwpck_require__(211); +var events = __nccwpck_require__(614); +var assert = __nccwpck_require__(357); +var util = __nccwpck_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 983: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var PlainValue = __nccwpck_require__(215); +var resolveSeq = __nccwpck_require__(140); +var Schema = __nccwpck_require__(656); + +const defaultOptions = { + anchorPrefix: 'a', + customTags: null, + indent: 2, + indentSeq: true, + keepCstNodes: false, + keepNodeTypes: true, + keepBlobsInJSON: true, + mapAsMap: false, + maxAliasCount: 100, + prettyErrors: false, + // TODO Set true in v2 + simpleKeys: false, + version: '1.2' +}; +const scalarOptions = { + get binary() { + return resolveSeq.binaryOptions; + }, + + set binary(opt) { + Object.assign(resolveSeq.binaryOptions, opt); + }, + + get bool() { + return resolveSeq.boolOptions; + }, + + set bool(opt) { + Object.assign(resolveSeq.boolOptions, opt); + }, + + get int() { + return resolveSeq.intOptions; + }, + + set int(opt) { + Object.assign(resolveSeq.intOptions, opt); + }, + + get null() { + return resolveSeq.nullOptions; + }, + + set null(opt) { + Object.assign(resolveSeq.nullOptions, opt); + }, + + get str() { + return resolveSeq.strOptions; + }, + + set str(opt) { + Object.assign(resolveSeq.strOptions, opt); + } + +}; +const documentOptions = { + '1.0': { + schema: 'yaml-1.1', + merge: true, + tagPrefixes: [{ + handle: '!', + prefix: PlainValue.defaultTagPrefix + }, { + handle: '!!', + prefix: 'tag:private.yaml.org,2002:' + }] + }, + '1.1': { + schema: 'yaml-1.1', + merge: true, + tagPrefixes: [{ + handle: '!', + prefix: '!' + }, { + handle: '!!', + prefix: PlainValue.defaultTagPrefix + }] + }, + '1.2': { + schema: 'core', + merge: false, + tagPrefixes: [{ + handle: '!', + prefix: '!' + }, { + handle: '!!', + prefix: PlainValue.defaultTagPrefix + }] + } +}; + +function stringifyTag(doc, tag) { + if ((doc.version || doc.options.version) === '1.0') { + const priv = tag.match(/^tag:private\.yaml\.org,2002:([^:/]+)$/); + if (priv) return '!' 
+ priv[1]; + const vocab = tag.match(/^tag:([a-zA-Z0-9-]+)\.yaml\.org,2002:(.*)/); + return vocab ? `!${vocab[1]}/${vocab[2]}` : `!${tag.replace(/^tag:/, '')}`; + } + + let p = doc.tagPrefixes.find(p => tag.indexOf(p.prefix) === 0); + + if (!p) { + const dtp = doc.getDefaults().tagPrefixes; + p = dtp && dtp.find(p => tag.indexOf(p.prefix) === 0); + } + + if (!p) return tag[0] === '!' ? tag : `!<${tag}>`; + const suffix = tag.substr(p.prefix.length).replace(/[!,[\]{}]/g, ch => ({ + '!': '%21', + ',': '%2C', + '[': '%5B', + ']': '%5D', + '{': '%7B', + '}': '%7D' + })[ch]); + return p.handle + suffix; +} + +function getTagObject(tags, item) { + if (item instanceof resolveSeq.Alias) return resolveSeq.Alias; + + if (item.tag) { + const match = tags.filter(t => t.tag === item.tag); + if (match.length > 0) return match.find(t => t.format === item.format) || match[0]; + } + + let tagObj, obj; + + if (item instanceof resolveSeq.Scalar) { + obj = item.value; // TODO: deprecate/remove class check + + const match = tags.filter(t => t.identify && t.identify(obj) || t.class && obj instanceof t.class); + tagObj = match.find(t => t.format === item.format) || match.find(t => !t.format); + } else { + obj = item; + tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); + } + + if (!tagObj) { + const name = obj && obj.constructor ? obj.constructor.name : typeof obj; + throw new Error(`Tag not resolved for ${name} value`); + } + + return tagObj; +} // needs to be called before value stringifier to allow for circular anchor refs + + +function stringifyProps(node, tagObj, { + anchors, + doc +}) { + const props = []; + const anchor = doc.anchors.getName(node); + + if (anchor) { + anchors[anchor] = node; + props.push(`&${anchor}`); + } + + if (node.tag) { + props.push(stringifyTag(doc, node.tag)); + } else if (!tagObj.default) { + props.push(stringifyTag(doc, tagObj.tag)); + } + + return props.join(' '); +} + +function stringify(item, ctx, onComment, onChompKeep) { + const { + anchors, + schema + } = ctx.doc; + let tagObj; + + if (!(item instanceof resolveSeq.Node)) { + const createCtx = { + aliasNodes: [], + onTagObj: o => tagObj = o, + prevObjects: new Map() + }; + item = schema.createNode(item, true, null, createCtx); + + for (const alias of createCtx.aliasNodes) { + alias.source = alias.source.node; + let name = anchors.getName(alias.source); + + if (!name) { + name = anchors.newName(); + anchors.map[name] = alias.source; + } + } + } + + if (item instanceof resolveSeq.Pair) return item.toString(ctx, onComment, onChompKeep); + if (!tagObj) tagObj = getTagObject(schema.tags, item); + const props = stringifyProps(item, tagObj, ctx); + if (props.length > 0) ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1; + const str = typeof tagObj.stringify === 'function' ? tagObj.stringify(item, ctx, onComment, onChompKeep) : item instanceof resolveSeq.Scalar ? resolveSeq.stringifyString(item, ctx, onComment, onChompKeep) : item.toString(ctx, onComment, onChompKeep); + if (!props) return str; + return item instanceof resolveSeq.Scalar || str[0] === '{' || str[0] === '[' ? 
`${props} ${str}` : `${props}\n${ctx.indent}${str}`; +} + +class Anchors { + static validAnchorNode(node) { + return node instanceof resolveSeq.Scalar || node instanceof resolveSeq.YAMLSeq || node instanceof resolveSeq.YAMLMap; + } + + constructor(prefix) { + PlainValue._defineProperty(this, "map", {}); + + this.prefix = prefix; + } + + createAlias(node, name) { + this.setAnchor(node, name); + return new resolveSeq.Alias(node); + } + + createMergePair(...sources) { + const merge = new resolveSeq.Merge(); + merge.value.items = sources.map(s => { + if (s instanceof resolveSeq.Alias) { + if (s.source instanceof resolveSeq.YAMLMap) return s; + } else if (s instanceof resolveSeq.YAMLMap) { + return this.createAlias(s); + } + + throw new Error('Merge sources must be Map nodes or their Aliases'); + }); + return merge; + } + + getName(node) { + const { + map + } = this; + return Object.keys(map).find(a => map[a] === node); + } + + getNames() { + return Object.keys(this.map); + } + + getNode(name) { + return this.map[name]; + } + + newName(prefix) { + if (!prefix) prefix = this.prefix; + const names = Object.keys(this.map); + + for (let i = 1; true; ++i) { + const name = `${prefix}${i}`; + if (!names.includes(name)) return name; + } + } // During parsing, map & aliases contain CST nodes + + + resolveNodes() { + const { + map, + _cstAliases + } = this; + Object.keys(map).forEach(a => { + map[a] = map[a].resolved; + }); + + _cstAliases.forEach(a => { + a.source = a.source.resolved; + }); + + delete this._cstAliases; + } + + setAnchor(node, name) { + if (node != null && !Anchors.validAnchorNode(node)) { + throw new Error('Anchors may only be set for Scalar, Seq and Map nodes'); + } + + if (name && /[\x00-\x19\s,[\]{}]/.test(name)) { + throw new Error('Anchor names must not contain whitespace or control characters'); + } + + const { + map + } = this; + const prev = node && Object.keys(map).find(a => map[a] === node); + + if (prev) { + if (!name) { + return prev; + } else if (prev !== name) { + delete map[prev]; + map[name] = node; + } + } else { + if (!name) { + if (!node) return null; + name = this.newName(); + } + + map[name] = node; + } + + return name; + } + +} + +const visit = (node, tags) => { + if (node && typeof node === 'object') { + const { + tag + } = node; + + if (node instanceof resolveSeq.Collection) { + if (tag) tags[tag] = true; + node.items.forEach(n => visit(n, tags)); + } else if (node instanceof resolveSeq.Pair) { + visit(node.key, tags); + visit(node.value, tags); + } else if (node instanceof resolveSeq.Scalar) { + if (tag) tags[tag] = true; + } + } + + return tags; +}; + +const listTagNames = node => Object.keys(visit(node, {})); + +function parseContents(doc, contents) { + const comments = { + before: [], + after: [] + }; + let body = undefined; + let spaceBefore = false; + + for (const node of contents) { + if (node.valueRange) { + if (body !== undefined) { + const msg = 'Document contains trailing content not separated by a ... or --- line'; + doc.errors.push(new PlainValue.YAMLSyntaxError(node, msg)); + break; + } + + const res = resolveSeq.resolveNode(doc, node); + + if (spaceBefore) { + res.spaceBefore = true; + spaceBefore = false; + } + + body = res; + } else if (node.comment !== null) { + const cc = body === undefined ? 
comments.before : comments.after; + cc.push(node.comment); + } else if (node.type === PlainValue.Type.BLANK_LINE) { + spaceBefore = true; + + if (body === undefined && comments.before.length > 0 && !doc.commentBefore) { + // space-separated comments at start are parsed as document comments + doc.commentBefore = comments.before.join('\n'); + comments.before = []; + } + } + } + + doc.contents = body || null; + + if (!body) { + doc.comment = comments.before.concat(comments.after).join('\n') || null; + } else { + const cb = comments.before.join('\n'); + + if (cb) { + const cbNode = body instanceof resolveSeq.Collection && body.items[0] ? body.items[0] : body; + cbNode.commentBefore = cbNode.commentBefore ? `${cb}\n${cbNode.commentBefore}` : cb; + } + + doc.comment = comments.after.join('\n') || null; + } +} + +function resolveTagDirective({ + tagPrefixes +}, directive) { + const [handle, prefix] = directive.parameters; + + if (!handle || !prefix) { + const msg = 'Insufficient parameters given for %TAG directive'; + throw new PlainValue.YAMLSemanticError(directive, msg); + } + + if (tagPrefixes.some(p => p.handle === handle)) { + const msg = 'The %TAG directive must only be given at most once per handle in the same document.'; + throw new PlainValue.YAMLSemanticError(directive, msg); + } + + return { + handle, + prefix + }; +} + +function resolveYamlDirective(doc, directive) { + let [version] = directive.parameters; + if (directive.name === 'YAML:1.0') version = '1.0'; + + if (!version) { + const msg = 'Insufficient parameters given for %YAML directive'; + throw new PlainValue.YAMLSemanticError(directive, msg); + } + + if (!documentOptions[version]) { + const v0 = doc.version || doc.options.version; + const msg = `Document will be parsed as YAML ${v0} rather than YAML ${version}`; + doc.warnings.push(new PlainValue.YAMLWarning(directive, msg)); + } + + return version; +} + +function parseDirectives(doc, directives, prevDoc) { + const directiveComments = []; + let hasDirectives = false; + + for (const directive of directives) { + const { + comment, + name + } = directive; + + switch (name) { + case 'TAG': + try { + doc.tagPrefixes.push(resolveTagDirective(doc, directive)); + } catch (error) { + doc.errors.push(error); + } + + hasDirectives = true; + break; + + case 'YAML': + case 'YAML:1.0': + if (doc.version) { + const msg = 'The %YAML directive must only be given at most once per document.'; + doc.errors.push(new PlainValue.YAMLSemanticError(directive, msg)); + } + + try { + doc.version = resolveYamlDirective(doc, directive); + } catch (error) { + doc.errors.push(error); + } + + hasDirectives = true; + break; + + default: + if (name) { + const msg = `YAML only supports %TAG and %YAML directives, and not %${name}`; + doc.warnings.push(new PlainValue.YAMLWarning(directive, msg)); + } + + } + + if (comment) directiveComments.push(comment); + } + + if (prevDoc && !hasDirectives && '1.1' === (doc.version || prevDoc.version || doc.options.version)) { + const copyTagPrefix = ({ + handle, + prefix + }) => ({ + handle, + prefix + }); + + doc.tagPrefixes = prevDoc.tagPrefixes.map(copyTagPrefix); + doc.version = prevDoc.version; + } + + doc.commentBefore = directiveComments.join('\n') || null; +} + +function assertCollection(contents) { + if (contents instanceof resolveSeq.Collection) return true; + throw new Error('Expected a YAML collection as document contents'); +} + +class Document { + constructor(options) { + this.anchors = new Anchors(options.anchorPrefix); + this.commentBefore = null; + 
this.comment = null; + this.contents = null; + this.directivesEndMarker = null; + this.errors = []; + this.options = options; + this.schema = null; + this.tagPrefixes = []; + this.version = null; + this.warnings = []; + } + + add(value) { + assertCollection(this.contents); + return this.contents.add(value); + } + + addIn(path, value) { + assertCollection(this.contents); + this.contents.addIn(path, value); + } + + delete(key) { + assertCollection(this.contents); + return this.contents.delete(key); + } + + deleteIn(path) { + if (resolveSeq.isEmptyPath(path)) { + if (this.contents == null) return false; + this.contents = null; + return true; + } + + assertCollection(this.contents); + return this.contents.deleteIn(path); + } + + getDefaults() { + return Document.defaults[this.version] || Document.defaults[this.options.version] || {}; + } + + get(key, keepScalar) { + return this.contents instanceof resolveSeq.Collection ? this.contents.get(key, keepScalar) : undefined; + } + + getIn(path, keepScalar) { + if (resolveSeq.isEmptyPath(path)) return !keepScalar && this.contents instanceof resolveSeq.Scalar ? this.contents.value : this.contents; + return this.contents instanceof resolveSeq.Collection ? this.contents.getIn(path, keepScalar) : undefined; + } + + has(key) { + return this.contents instanceof resolveSeq.Collection ? this.contents.has(key) : false; + } + + hasIn(path) { + if (resolveSeq.isEmptyPath(path)) return this.contents !== undefined; + return this.contents instanceof resolveSeq.Collection ? this.contents.hasIn(path) : false; + } + + set(key, value) { + assertCollection(this.contents); + this.contents.set(key, value); + } + + setIn(path, value) { + if (resolveSeq.isEmptyPath(path)) this.contents = value;else { + assertCollection(this.contents); + this.contents.setIn(path, value); + } + } + + setSchema(id, customTags) { + if (!id && !customTags && this.schema) return; + if (typeof id === 'number') id = id.toFixed(1); + + if (id === '1.0' || id === '1.1' || id === '1.2') { + if (this.version) this.version = id;else this.options.version = id; + delete this.options.schema; + } else if (id && typeof id === 'string') { + this.options.schema = id; + } + + if (Array.isArray(customTags)) this.options.customTags = customTags; + const opt = Object.assign({}, this.getDefaults(), this.options); + this.schema = new Schema.Schema(opt); + } + + parse(node, prevDoc) { + if (this.options.keepCstNodes) this.cstNode = node; + if (this.options.keepNodeTypes) this.type = 'DOCUMENT'; + const { + directives = [], + contents = [], + directivesEndMarker, + error, + valueRange + } = node; + + if (error) { + if (!error.source) error.source = this; + this.errors.push(error); + } + + parseDirectives(this, directives, prevDoc); + if (directivesEndMarker) this.directivesEndMarker = true; + this.range = valueRange ? [valueRange.start, valueRange.end] : null; + this.setSchema(); + this.anchors._cstAliases = []; + parseContents(this, contents); + this.anchors.resolveNodes(); + + if (this.options.prettyErrors) { + for (const error of this.errors) if (error instanceof PlainValue.YAMLError) error.makePretty(); + + for (const warn of this.warnings) if (warn instanceof PlainValue.YAMLError) warn.makePretty(); + } + + return this; + } + + listNonDefaultTags() { + return listTagNames(this.contents).filter(t => t.indexOf(Schema.Schema.defaultPrefix) !== 0); + } + + setTagPrefix(handle, prefix) { + if (handle[0] !== '!' 
|| handle[handle.length - 1] !== '!') throw new Error('Handle must start and end with !'); + + if (prefix) { + const prev = this.tagPrefixes.find(p => p.handle === handle); + if (prev) prev.prefix = prefix;else this.tagPrefixes.push({ + handle, + prefix + }); + } else { + this.tagPrefixes = this.tagPrefixes.filter(p => p.handle !== handle); + } + } + + toJSON(arg, onAnchor) { + const { + keepBlobsInJSON, + mapAsMap, + maxAliasCount + } = this.options; + const keep = keepBlobsInJSON && (typeof arg !== 'string' || !(this.contents instanceof resolveSeq.Scalar)); + const ctx = { + doc: this, + indentStep: ' ', + keep, + mapAsMap: keep && !!mapAsMap, + maxAliasCount, + stringify // Requiring directly in Pair would create circular dependencies + + }; + const anchorNames = Object.keys(this.anchors.map); + if (anchorNames.length > 0) ctx.anchors = new Map(anchorNames.map(name => [this.anchors.map[name], { + alias: [], + aliasCount: 0, + count: 1 + }])); + const res = resolveSeq.toJSON(this.contents, arg, ctx); + if (typeof onAnchor === 'function' && ctx.anchors) for (const { + count, + res + } of ctx.anchors.values()) onAnchor(res, count); + return res; + } + + toString() { + if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified'); + const indentSize = this.options.indent; + + if (!Number.isInteger(indentSize) || indentSize <= 0) { + const s = JSON.stringify(indentSize); + throw new Error(`"indent" option must be a positive integer, not ${s}`); + } + + this.setSchema(); + const lines = []; + let hasDirectives = false; + + if (this.version) { + let vd = '%YAML 1.2'; + + if (this.schema.name === 'yaml-1.1') { + if (this.version === '1.0') vd = '%YAML:1.0';else if (this.version === '1.1') vd = '%YAML 1.1'; + } + + lines.push(vd); + hasDirectives = true; + } + + const tagNames = this.listNonDefaultTags(); + this.tagPrefixes.forEach(({ + handle, + prefix + }) => { + if (tagNames.some(t => t.indexOf(prefix) === 0)) { + lines.push(`%TAG ${handle} ${prefix}`); + hasDirectives = true; + } + }); + if (hasDirectives || this.directivesEndMarker) lines.push('---'); + + if (this.commentBefore) { + if (hasDirectives || !this.directivesEndMarker) lines.unshift(''); + lines.unshift(this.commentBefore.replace(/^/gm, '#')); + } + + const ctx = { + anchors: {}, + doc: this, + indent: '', + indentStep: ' '.repeat(indentSize), + stringify // Requiring directly in nodes would create circular dependencies + + }; + let chompKeep = false; + let contentComment = null; + + if (this.contents) { + if (this.contents instanceof resolveSeq.Node) { + if (this.contents.spaceBefore && (hasDirectives || this.directivesEndMarker)) lines.push(''); + if (this.contents.commentBefore) lines.push(this.contents.commentBefore.replace(/^/gm, '#')); // top-level block scalars need to be indented if followed by a comment + + ctx.forceBlockIndent = !!this.comment; + contentComment = this.contents.comment; + } + + const onChompKeep = contentComment ? 
null : () => chompKeep = true; + const body = stringify(this.contents, ctx, () => contentComment = null, onChompKeep); + lines.push(resolveSeq.addComment(body, '', contentComment)); + } else if (this.contents !== undefined) { + lines.push(stringify(this.contents, ctx)); + } + + if (this.comment) { + if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') lines.push(''); + lines.push(this.comment.replace(/^/gm, '#')); + } + + return lines.join('\n') + '\n'; + } + +} + +PlainValue._defineProperty(Document, "defaults", documentOptions); + +exports.Document = Document; +exports.defaultOptions = defaultOptions; +exports.scalarOptions = scalarOptions; + + +/***/ }), + +/***/ 215: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const Char = { + ANCHOR: '&', + COMMENT: '#', + TAG: '!', + DIRECTIVES_END: '-', + DOCUMENT_END: '.' +}; +const Type = { + ALIAS: 'ALIAS', + BLANK_LINE: 'BLANK_LINE', + BLOCK_FOLDED: 'BLOCK_FOLDED', + BLOCK_LITERAL: 'BLOCK_LITERAL', + COMMENT: 'COMMENT', + DIRECTIVE: 'DIRECTIVE', + DOCUMENT: 'DOCUMENT', + FLOW_MAP: 'FLOW_MAP', + FLOW_SEQ: 'FLOW_SEQ', + MAP: 'MAP', + MAP_KEY: 'MAP_KEY', + MAP_VALUE: 'MAP_VALUE', + PLAIN: 'PLAIN', + QUOTE_DOUBLE: 'QUOTE_DOUBLE', + QUOTE_SINGLE: 'QUOTE_SINGLE', + SEQ: 'SEQ', + SEQ_ITEM: 'SEQ_ITEM' +}; +const defaultTagPrefix = 'tag:yaml.org,2002:'; +const defaultTags = { + MAP: 'tag:yaml.org,2002:map', + SEQ: 'tag:yaml.org,2002:seq', + STR: 'tag:yaml.org,2002:str' +}; + +function findLineStarts(src) { + const ls = [0]; + let offset = src.indexOf('\n'); + + while (offset !== -1) { + offset += 1; + ls.push(offset); + offset = src.indexOf('\n', offset); + } + + return ls; +} + +function getSrcInfo(cst) { + let lineStarts, src; + + if (typeof cst === 'string') { + lineStarts = findLineStarts(cst); + src = cst; + } else { + if (Array.isArray(cst)) cst = cst[0]; + + if (cst && cst.context) { + if (!cst.lineStarts) cst.lineStarts = findLineStarts(cst.context.src); + lineStarts = cst.lineStarts; + src = cst.context.src; + } + } + + return { + lineStarts, + src + }; +} +/** + * @typedef {Object} LinePos - One-indexed position in the source + * @property {number} line + * @property {number} col + */ + +/** + * Determine the line/col position matching a character offset. + * + * Accepts a source string or a CST document as the second parameter. With + * the latter, starting indices for lines are cached in the document as + * `lineStarts: number[]`. + * + * Returns a one-indexed `{ line, col }` location if found, or + * `undefined` otherwise. + * + * @param {number} offset + * @param {string|Document|Document[]} cst + * @returns {?LinePos} + */ + + +function getLinePos(offset, cst) { + if (typeof offset !== 'number' || offset < 0) return null; + const { + lineStarts, + src + } = getSrcInfo(cst); + if (!lineStarts || !src || offset > src.length) return null; + + for (let i = 0; i < lineStarts.length; ++i) { + const start = lineStarts[i]; + + if (offset < start) { + return { + line: i, + col: offset - lineStarts[i - 1] + 1 + }; + } + + if (offset === start) return { + line: i + 1, + col: 1 + }; + } + + const line = lineStarts.length; + return { + line, + col: offset - lineStarts[line - 1] + 1 + }; +} +/** + * Get a specified line from the source. + * + * Accepts a source string or a CST document as the second parameter. With + * the latter, starting indices for lines are cached in the document as + * `lineStarts: number[]`. + * + * Returns the line as a string if found, or `null` otherwise. 
+ * + * @param {number} line One-indexed line number + * @param {string|Document|Document[]} cst + * @returns {?string} + */ + +function getLine(line, cst) { + const { + lineStarts, + src + } = getSrcInfo(cst); + if (!lineStarts || !(line >= 1) || line > lineStarts.length) return null; + const start = lineStarts[line - 1]; + let end = lineStarts[line]; // undefined for last line; that's ok for slice() + + while (end && end > start && src[end - 1] === '\n') --end; + + return src.slice(start, end); +} +/** + * Pretty-print the starting line from the source indicated by the range `pos` + * + * Trims output to `maxWidth` chars while keeping the starting column visible, + * using `…` at either end to indicate dropped characters. + * + * Returns a two-line string (or `null`) with `\n` as separator; the second line + * will hold appropriately indented `^` marks indicating the column range. + * + * @param {Object} pos + * @param {LinePos} pos.start + * @param {LinePos} [pos.end] + * @param {string|Document|Document[]*} cst + * @param {number} [maxWidth=80] + * @returns {?string} + */ + +function getPrettyContext({ + start, + end +}, cst, maxWidth = 80) { + let src = getLine(start.line, cst); + if (!src) return null; + let { + col + } = start; + + if (src.length > maxWidth) { + if (col <= maxWidth - 10) { + src = src.substr(0, maxWidth - 1) + '…'; + } else { + const halfWidth = Math.round(maxWidth / 2); + if (src.length > col + halfWidth) src = src.substr(0, col + halfWidth - 1) + '…'; + col -= src.length - maxWidth; + src = '…' + src.substr(1 - maxWidth); + } + } + + let errLen = 1; + let errEnd = ''; + + if (end) { + if (end.line === start.line && col + (end.col - start.col) <= maxWidth + 1) { + errLen = end.col - start.col; + } else { + errLen = Math.min(src.length + 1, maxWidth) - col; + errEnd = '…'; + } + } + + const offset = col > 1 ? ' '.repeat(col - 1) : ''; + const err = '^'.repeat(errLen); + return `${src}\n${offset}${err}${errEnd}`; +} + +class Range { + static copy(orig) { + return new Range(orig.start, orig.end); + } + + constructor(start, end) { + this.start = start; + this.end = end || start; + } + + isEmpty() { + return typeof this.start !== 'number' || !this.end || this.end <= this.start; + } + /** + * Set `origStart` and `origEnd` to point to the original source range for + * this node, which may differ due to dropped CR characters. + * + * @param {number[]} cr - Positions of dropped CR characters + * @param {number} offset - Starting index of `cr` from the last call + * @returns {number} - The next offset, matching the one found for `origStart` + */ + + + setOrigRange(cr, offset) { + const { + start, + end + } = this; + + if (cr.length === 0 || end <= cr[0]) { + this.origStart = start; + this.origEnd = end; + return offset; + } + + let i = offset; + + while (i < cr.length) { + if (cr[i] > start) break;else ++i; + } + + this.origStart = start + i; + const nextOffset = i; + + while (i < cr.length) { + // if end was at \n, it should now be at \r + if (cr[i] >= end) break;else ++i; + } + + this.origEnd = end + i; + return nextOffset; + } + +} + +/** Root class of all nodes */ + +class Node { + static addStringTerminator(src, offset, str) { + if (str[str.length - 1] === '\n') return str; + const next = Node.endOfWhiteSpace(src, offset); + return next >= src.length || src[next] === '\n' ? str + '\n' : str; + } // ^(---|...) 
+ + + static atDocumentBoundary(src, offset, sep) { + const ch0 = src[offset]; + if (!ch0) return true; + const prev = src[offset - 1]; + if (prev && prev !== '\n') return false; + + if (sep) { + if (ch0 !== sep) return false; + } else { + if (ch0 !== Char.DIRECTIVES_END && ch0 !== Char.DOCUMENT_END) return false; + } + + const ch1 = src[offset + 1]; + const ch2 = src[offset + 2]; + if (ch1 !== ch0 || ch2 !== ch0) return false; + const ch3 = src[offset + 3]; + return !ch3 || ch3 === '\n' || ch3 === '\t' || ch3 === ' '; + } + + static endOfIdentifier(src, offset) { + let ch = src[offset]; + const isVerbatim = ch === '<'; + const notOk = isVerbatim ? ['\n', '\t', ' ', '>'] : ['\n', '\t', ' ', '[', ']', '{', '}', ',']; + + while (ch && notOk.indexOf(ch) === -1) ch = src[offset += 1]; + + if (isVerbatim && ch === '>') offset += 1; + return offset; + } + + static endOfIndent(src, offset) { + let ch = src[offset]; + + while (ch === ' ') ch = src[offset += 1]; + + return offset; + } + + static endOfLine(src, offset) { + let ch = src[offset]; + + while (ch && ch !== '\n') ch = src[offset += 1]; + + return offset; + } + + static endOfWhiteSpace(src, offset) { + let ch = src[offset]; + + while (ch === '\t' || ch === ' ') ch = src[offset += 1]; + + return offset; + } + + static startOfLine(src, offset) { + let ch = src[offset - 1]; + if (ch === '\n') return offset; + + while (ch && ch !== '\n') ch = src[offset -= 1]; + + return offset + 1; + } + /** + * End of indentation, or null if the line's indent level is not more + * than `indent` + * + * @param {string} src + * @param {number} indent + * @param {number} lineStart + * @returns {?number} + */ + + + static endOfBlockIndent(src, indent, lineStart) { + const inEnd = Node.endOfIndent(src, lineStart); + + if (inEnd > lineStart + indent) { + return inEnd; + } else { + const wsEnd = Node.endOfWhiteSpace(src, inEnd); + const ch = src[wsEnd]; + if (!ch || ch === '\n') return wsEnd; + } + + return null; + } + + static atBlank(src, offset, endAsBlank) { + const ch = src[offset]; + return ch === '\n' || ch === '\t' || ch === ' ' || endAsBlank && !ch; + } + + static nextNodeIsIndented(ch, indentDiff, indicatorAsIndent) { + if (!ch || indentDiff < 0) return false; + if (indentDiff > 0) return true; + return indicatorAsIndent && ch === '-'; + } // should be at line or string end, or at next non-whitespace char + + + static normalizeOffset(src, offset) { + const ch = src[offset]; + return !ch ? offset : ch !== '\n' && src[offset - 1] === '\n' ? 
offset - 1 : Node.endOfWhiteSpace(src, offset); + } // fold single newline into space, multiple newlines to N - 1 newlines + // presumes src[offset] === '\n' + + + static foldNewline(src, offset, indent) { + let inCount = 0; + let error = false; + let fold = ''; + let ch = src[offset + 1]; + + while (ch === ' ' || ch === '\t' || ch === '\n') { + switch (ch) { + case '\n': + inCount = 0; + offset += 1; + fold += '\n'; + break; + + case '\t': + if (inCount <= indent) error = true; + offset = Node.endOfWhiteSpace(src, offset + 2) - 1; + break; + + case ' ': + inCount += 1; + offset += 1; + break; + } + + ch = src[offset + 1]; + } + + if (!fold) fold = ' '; + if (ch && inCount <= indent) error = true; + return { + fold, + offset, + error + }; + } + + constructor(type, props, context) { + Object.defineProperty(this, 'context', { + value: context || null, + writable: true + }); + this.error = null; + this.range = null; + this.valueRange = null; + this.props = props || []; + this.type = type; + this.value = null; + } + + getPropValue(idx, key, skipKey) { + if (!this.context) return null; + const { + src + } = this.context; + const prop = this.props[idx]; + return prop && src[prop.start] === key ? src.slice(prop.start + (skipKey ? 1 : 0), prop.end) : null; + } + + get anchor() { + for (let i = 0; i < this.props.length; ++i) { + const anchor = this.getPropValue(i, Char.ANCHOR, true); + if (anchor != null) return anchor; + } + + return null; + } + + get comment() { + const comments = []; + + for (let i = 0; i < this.props.length; ++i) { + const comment = this.getPropValue(i, Char.COMMENT, true); + if (comment != null) comments.push(comment); + } + + return comments.length > 0 ? comments.join('\n') : null; + } + + commentHasRequiredWhitespace(start) { + const { + src + } = this.context; + if (this.header && start === this.header.end) return false; + if (!this.valueRange) return false; + const { + end + } = this.valueRange; + return start !== end || Node.atBlank(src, end - 1); + } + + get hasComment() { + if (this.context) { + const { + src + } = this.context; + + for (let i = 0; i < this.props.length; ++i) { + if (src[this.props[i].start] === Char.COMMENT) return true; + } + } + + return false; + } + + get hasProps() { + if (this.context) { + const { + src + } = this.context; + + for (let i = 0; i < this.props.length; ++i) { + if (src[this.props[i].start] !== Char.COMMENT) return true; + } + } + + return false; + } + + get includesTrailingLines() { + return false; + } + + get jsonLike() { + const jsonLikeTypes = [Type.FLOW_MAP, Type.FLOW_SEQ, Type.QUOTE_DOUBLE, Type.QUOTE_SINGLE]; + return jsonLikeTypes.indexOf(this.type) !== -1; + } + + get rangeAsLinePos() { + if (!this.range || !this.context) return undefined; + const start = getLinePos(this.range.start, this.context.root); + if (!start) return undefined; + const end = getLinePos(this.range.end, this.context.root); + return { + start, + end + }; + } + + get rawValue() { + if (!this.valueRange || !this.context) return null; + const { + start, + end + } = this.valueRange; + return this.context.src.slice(start, end); + } + + get tag() { + for (let i = 0; i < this.props.length; ++i) { + const tag = this.getPropValue(i, Char.TAG, false); + + if (tag != null) { + if (tag[1] === '<') { + return { + verbatim: tag.slice(2, -1) + }; + } else { + // eslint-disable-next-line no-unused-vars + const [_, handle, suffix] = tag.match(/^(.*!)([^!]*)$/); + return { + handle, + suffix + }; + } + } + } + + return null; + } + + get valueRangeContainsNewline() { + if 
(!this.valueRange || !this.context) return false; + const { + start, + end + } = this.valueRange; + const { + src + } = this.context; + + for (let i = start; i < end; ++i) { + if (src[i] === '\n') return true; + } + + return false; + } + + parseComment(start) { + const { + src + } = this.context; + + if (src[start] === Char.COMMENT) { + const end = Node.endOfLine(src, start + 1); + const commentRange = new Range(start, end); + this.props.push(commentRange); + return end; + } + + return start; + } + /** + * Populates the `origStart` and `origEnd` values of all ranges for this + * node. Extended by child classes to handle descendant nodes. + * + * @param {number[]} cr - Positions of dropped CR characters + * @param {number} offset - Starting index of `cr` from the last call + * @returns {number} - The next offset, matching the one found for `origStart` + */ + + + setOrigRanges(cr, offset) { + if (this.range) offset = this.range.setOrigRange(cr, offset); + if (this.valueRange) this.valueRange.setOrigRange(cr, offset); + this.props.forEach(prop => prop.setOrigRange(cr, offset)); + return offset; + } + + toString() { + const { + context: { + src + }, + range, + value + } = this; + if (value != null) return value; + const str = src.slice(range.start, range.end); + return Node.addStringTerminator(src, range.end, str); + } + +} + +class YAMLError extends Error { + constructor(name, source, message) { + if (!message || !(source instanceof Node)) throw new Error(`Invalid arguments for new ${name}`); + super(); + this.name = name; + this.message = message; + this.source = source; + } + + makePretty() { + if (!this.source) return; + this.nodeType = this.source.type; + const cst = this.source.context && this.source.context.root; + + if (typeof this.offset === 'number') { + this.range = new Range(this.offset, this.offset + 1); + const start = cst && getLinePos(this.offset, cst); + + if (start) { + const end = { + line: start.line, + col: start.col + 1 + }; + this.linePos = { + start, + end + }; + } + + delete this.offset; + } else { + this.range = this.source.range; + this.linePos = this.source.rangeAsLinePos; + } + + if (this.linePos) { + const { + line, + col + } = this.linePos.start; + this.message += ` at line ${line}, column ${col}`; + const ctx = cst && getPrettyContext(this.linePos, cst); + if (ctx) this.message += `:\n\n${ctx}\n`; + } + + delete this.source; + } + +} +class YAMLReferenceError extends YAMLError { + constructor(source, message) { + super('YAMLReferenceError', source, message); + } + +} +class YAMLSemanticError extends YAMLError { + constructor(source, message) { + super('YAMLSemanticError', source, message); + } + +} +class YAMLSyntaxError extends YAMLError { + constructor(source, message) { + super('YAMLSyntaxError', source, message); + } + +} +class YAMLWarning extends YAMLError { + constructor(source, message) { + super('YAMLWarning', source, message); + } + +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +class PlainValue extends Node { + static endOfLine(src, start, inFlow) { + let ch = src[start]; + let offset = start; + + while (ch && ch !== '\n') { + if (inFlow && (ch === '[' || ch === ']' || ch === '{' || ch === '}' || ch === ',')) break; + const next = src[offset + 1]; + if (ch === ':' && (!next || next === '\n' || next === '\t' || next === ' ' || inFlow && next === ',')) break; 
+ if ((ch === ' ' || ch === '\t') && next === '#') break; + offset += 1; + ch = next; + } + + return offset; + } + + get strValue() { + if (!this.valueRange || !this.context) return null; + let { + start, + end + } = this.valueRange; + const { + src + } = this.context; + let ch = src[end - 1]; + + while (start < end && (ch === '\n' || ch === '\t' || ch === ' ')) ch = src[--end - 1]; + + let str = ''; + + for (let i = start; i < end; ++i) { + const ch = src[i]; + + if (ch === '\n') { + const { + fold, + offset + } = Node.foldNewline(src, i, -1); + str += fold; + i = offset; + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + const wsStart = i; + let next = src[i + 1]; + + while (i < end && (next === ' ' || next === '\t')) { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + const ch0 = src[start]; + + switch (ch0) { + case '\t': + { + const msg = 'Plain value cannot start with a tab character'; + const errors = [new YAMLSemanticError(this, msg)]; + return { + errors, + str + }; + } + + case '@': + case '`': + { + const msg = `Plain value cannot start with reserved character ${ch0}`; + const errors = [new YAMLSemanticError(this, msg)]; + return { + errors, + str + }; + } + + default: + return str; + } + } + + parseBlockValue(start) { + const { + indent, + inFlow, + src + } = this.context; + let offset = start; + let valueEnd = start; + + for (let ch = src[offset]; ch === '\n'; ch = src[offset]) { + if (Node.atDocumentBoundary(src, offset + 1)) break; + const end = Node.endOfBlockIndent(src, indent, offset + 1); + if (end === null || src[end] === '#') break; + + if (src[end] === '\n') { + offset = end; + } else { + valueEnd = PlainValue.endOfLine(src, end, inFlow); + offset = valueEnd; + } + } + + if (this.valueRange.isEmpty()) this.valueRange.start = start; + this.valueRange.end = valueEnd; + return valueEnd; + } + /** + * Parses a plain value from the source + * + * Accepted forms are: + * ``` + * #comment + * + * first line + * + * first line #comment + * + * first line + * block + * lines + * + * #comment + * block + * lines + * ``` + * where block lines are empty or have an indent level greater than `indent`. 
+ * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar, may be `\n` + */ + + + parse(context, start) { + this.context = context; + const { + inFlow, + src + } = context; + let offset = start; + const ch = src[offset]; + + if (ch && ch !== '#' && ch !== '\n') { + offset = PlainValue.endOfLine(src, start, inFlow); + } + + this.valueRange = new Range(start, offset); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + + if (!this.hasComment || this.valueRange.isEmpty()) { + offset = this.parseBlockValue(offset); + } + + return offset; + } + +} + +exports.Char = Char; +exports.Node = Node; +exports.PlainValue = PlainValue; +exports.Range = Range; +exports.Type = Type; +exports.YAMLError = YAMLError; +exports.YAMLReferenceError = YAMLReferenceError; +exports.YAMLSemanticError = YAMLSemanticError; +exports.YAMLSyntaxError = YAMLSyntaxError; +exports.YAMLWarning = YAMLWarning; +exports._defineProperty = _defineProperty; +exports.defaultTagPrefix = defaultTagPrefix; +exports.defaultTags = defaultTags; + + +/***/ }), + +/***/ 656: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var PlainValue = __nccwpck_require__(215); +var resolveSeq = __nccwpck_require__(140); +var warnings = __nccwpck_require__(383); + +function createMap(schema, obj, ctx) { + const map = new resolveSeq.YAMLMap(schema); + + if (obj instanceof Map) { + for (const [key, value] of obj) map.items.push(schema.createPair(key, value, ctx)); + } else if (obj && typeof obj === 'object') { + for (const key of Object.keys(obj)) map.items.push(schema.createPair(key, obj[key], ctx)); + } + + if (typeof schema.sortMapEntries === 'function') { + map.items.sort(schema.sortMapEntries); + } + + return map; +} + +const map = { + createNode: createMap, + default: true, + nodeClass: resolveSeq.YAMLMap, + tag: 'tag:yaml.org,2002:map', + resolve: resolveSeq.resolveMap +}; + +function createSeq(schema, obj, ctx) { + const seq = new resolveSeq.YAMLSeq(schema); + + if (obj && obj[Symbol.iterator]) { + for (const it of obj) { + const v = schema.createNode(it, ctx.wrapScalars, null, ctx); + seq.items.push(v); + } + } + + return seq; +} + +const seq = { + createNode: createSeq, + default: true, + nodeClass: resolveSeq.YAMLSeq, + tag: 'tag:yaml.org,2002:seq', + resolve: resolveSeq.resolveSeq +}; + +const string = { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: resolveSeq.resolveString, + + stringify(item, ctx, onComment, onChompKeep) { + ctx = Object.assign({ + actualString: true + }, ctx); + return resolveSeq.stringifyString(item, ctx, onComment, onChompKeep); + }, + + options: resolveSeq.strOptions +}; + +const failsafe = [map, seq, string]; + +/* global BigInt */ + +const intIdentify = value => typeof value === 'bigint' || Number.isInteger(value); + +const intResolve = (src, part, radix) => resolveSeq.intOptions.asBigInt ? BigInt(src) : parseInt(part, radix); + +function intStringify(node, radix, prefix) { + const { + value + } = node; + if (intIdentify(value) && value >= 0) return prefix + value.toString(radix); + return resolveSeq.stringifyNumber(node); +} + +const nullObj = { + identify: value => value == null, + createNode: (schema, value, ctx) => ctx.wrapScalars ? 
new resolveSeq.Scalar(null) : null, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: () => null, + options: resolveSeq.nullOptions, + stringify: () => resolveSeq.nullOptions.nullStr +}; +const boolObj = { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, + resolve: str => str[0] === 't' || str[0] === 'T', + options: resolveSeq.boolOptions, + stringify: ({ + value + }) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr +}; +const octObj = { + identify: value => intIdentify(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^0o([0-7]+)$/, + resolve: (str, oct) => intResolve(str, oct, 8), + options: resolveSeq.intOptions, + stringify: node => intStringify(node, 8, '0o') +}; +const intObj = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9]+$/, + resolve: str => intResolve(str, str, 10), + options: resolveSeq.intOptions, + stringify: resolveSeq.stringifyNumber +}; +const hexObj = { + identify: value => intIdentify(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^0x([0-9a-fA-F]+)$/, + resolve: (str, hex) => intResolve(str, hex, 16), + options: resolveSeq.intOptions, + stringify: node => intStringify(node, 16, '0x') +}; +const nanObj = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.inf|(\.nan))$/i, + resolve: (str, nan) => nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY, + stringify: resolveSeq.stringifyNumber +}; +const expObj = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, + resolve: str => parseFloat(str), + stringify: ({ + value + }) => Number(value).toExponential() +}; +const floatObj = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:\.([0-9]+)|[0-9]+\.([0-9]*))$/, + + resolve(str, frac1, frac2) { + const frac = frac1 || frac2; + const node = new resolveSeq.Scalar(parseFloat(str)); + if (frac && frac[frac.length - 1] === '0') node.minFractionDigits = frac.length; + return node; + }, + + stringify: resolveSeq.stringifyNumber +}; +const core = failsafe.concat([nullObj, boolObj, octObj, intObj, hexObj, nanObj, expObj, floatObj]); + +/* global BigInt */ + +const intIdentify$1 = value => typeof value === 'bigint' || Number.isInteger(value); + +const stringifyJSON = ({ + value +}) => JSON.stringify(value); + +const json = [map, seq, { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: resolveSeq.resolveString, + stringify: stringifyJSON +}, { + identify: value => value == null, + createNode: (schema, value, ctx) => ctx.wrapScalars ? 
new resolveSeq.Scalar(null) : null, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^null$/, + resolve: () => null, + stringify: stringifyJSON +}, { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^true|false$/, + resolve: str => str === 'true', + stringify: stringifyJSON +}, { + identify: intIdentify$1, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^-?(?:0|[1-9][0-9]*)$/, + resolve: str => resolveSeq.intOptions.asBigInt ? BigInt(str) : parseInt(str, 10), + stringify: ({ + value + }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value) +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, + resolve: str => parseFloat(str), + stringify: stringifyJSON +}]; + +json.scalarFallback = str => { + throw new SyntaxError(`Unresolved plain scalar ${JSON.stringify(str)}`); +}; + +/* global BigInt */ + +const boolStringify = ({ + value +}) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr; + +const intIdentify$2 = value => typeof value === 'bigint' || Number.isInteger(value); + +function intResolve$1(sign, src, radix) { + let str = src.replace(/_/g, ''); + + if (resolveSeq.intOptions.asBigInt) { + switch (radix) { + case 2: + str = `0b${str}`; + break; + + case 8: + str = `0o${str}`; + break; + + case 16: + str = `0x${str}`; + break; + } + + const n = BigInt(str); + return sign === '-' ? BigInt(-1) * n : n; + } + + const n = parseInt(str, radix); + return sign === '-' ? -1 * n : n; +} + +function intStringify$1(node, radix, prefix) { + const { + value + } = node; + + if (intIdentify$2(value)) { + const str = value.toString(radix); + return value < 0 ? '-' + prefix + str.substr(1) : prefix + str; + } + + return resolveSeq.stringifyNumber(node); +} + +const yaml11 = failsafe.concat([{ + identify: value => value == null, + createNode: (schema, value, ctx) => ctx.wrapScalars ? 
new resolveSeq.Scalar(null) : null, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: () => null, + options: resolveSeq.nullOptions, + stringify: () => resolveSeq.nullOptions.nullStr +}, { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, + resolve: () => true, + options: resolveSeq.boolOptions, + stringify: boolStringify +}, { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, + resolve: () => false, + options: resolveSeq.boolOptions, + stringify: boolStringify +}, { + identify: intIdentify$2, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'BIN', + test: /^([-+]?)0b([0-1_]+)$/, + resolve: (str, sign, bin) => intResolve$1(sign, bin, 2), + stringify: node => intStringify$1(node, 2, '0b') +}, { + identify: intIdentify$2, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^([-+]?)0([0-7_]+)$/, + resolve: (str, sign, oct) => intResolve$1(sign, oct, 8), + stringify: node => intStringify$1(node, 8, '0') +}, { + identify: intIdentify$2, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^([-+]?)([0-9][0-9_]*)$/, + resolve: (str, sign, abs) => intResolve$1(sign, abs, 10), + stringify: resolveSeq.stringifyNumber +}, { + identify: intIdentify$2, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^([-+]?)0x([0-9a-fA-F_]+)$/, + resolve: (str, sign, hex) => intResolve$1(sign, hex, 16), + stringify: node => intStringify$1(node, 16, '0x') +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.inf|(\.nan))$/i, + resolve: (str, nan) => nan ? NaN : str[0] === '-' ? 
Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY, + stringify: resolveSeq.stringifyNumber +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?([0-9][0-9_]*)?(\.[0-9_]*)?[eE][-+]?[0-9]+$/, + resolve: str => parseFloat(str.replace(/_/g, '')), + stringify: ({ + value + }) => Number(value).toExponential() +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:[0-9][0-9_]*)?\.([0-9_]*)$/, + + resolve(str, frac) { + const node = new resolveSeq.Scalar(parseFloat(str.replace(/_/g, ''))); + + if (frac) { + const f = frac.replace(/_/g, ''); + if (f[f.length - 1] === '0') node.minFractionDigits = f.length; + } + + return node; + }, + + stringify: resolveSeq.stringifyNumber +}], warnings.binary, warnings.omap, warnings.pairs, warnings.set, warnings.intTime, warnings.floatTime, warnings.timestamp); + +const schemas = { + core, + failsafe, + json, + yaml11 +}; +const tags = { + binary: warnings.binary, + bool: boolObj, + float: floatObj, + floatExp: expObj, + floatNaN: nanObj, + floatTime: warnings.floatTime, + int: intObj, + intHex: hexObj, + intOct: octObj, + intTime: warnings.intTime, + map, + null: nullObj, + omap: warnings.omap, + pairs: warnings.pairs, + seq, + set: warnings.set, + timestamp: warnings.timestamp +}; + +function findTagObject(value, tagName, tags) { + if (tagName) { + const match = tags.filter(t => t.tag === tagName); + const tagObj = match.find(t => !t.format) || match[0]; + if (!tagObj) throw new Error(`Tag ${tagName} not found`); + return tagObj; + } // TODO: deprecate/remove class check + + + return tags.find(t => (t.identify && t.identify(value) || t.class && value instanceof t.class) && !t.format); +} + +function createNode(value, tagName, ctx) { + if (value instanceof resolveSeq.Node) return value; + const { + defaultPrefix, + onTagObj, + prevObjects, + schema, + wrapScalars + } = ctx; + if (tagName && tagName.startsWith('!!')) tagName = defaultPrefix + tagName.slice(2); + let tagObj = findTagObject(value, tagName, schema.tags); + + if (!tagObj) { + if (typeof value.toJSON === 'function') value = value.toJSON(); + if (typeof value !== 'object') return wrapScalars ? new resolveSeq.Scalar(value) : value; + tagObj = value instanceof Map ? map : value[Symbol.iterator] ? seq : map; + } + + if (onTagObj) { + onTagObj(tagObj); + delete ctx.onTagObj; + } // Detect duplicate references to the same object & use Alias nodes for all + // after first. The `obj` wrapper allows for circular references to resolve. + + + const obj = {}; + + if (value && typeof value === 'object' && prevObjects) { + const prev = prevObjects.get(value); + + if (prev) { + const alias = new resolveSeq.Alias(prev); // leaves source dirty; must be cleaned by caller + + ctx.aliasNodes.push(alias); // defined along with prevObjects + + return alias; + } + + obj.value = value; + prevObjects.set(value, obj); + } + + obj.node = tagObj.createNode ? tagObj.createNode(ctx.schema, value, ctx) : wrapScalars ? 
new resolveSeq.Scalar(value) : value; + if (tagName && obj.node instanceof resolveSeq.Node) obj.node.tag = tagName; + return obj.node; +} + +function getSchemaTags(schemas, knownTags, customTags, schemaId) { + let tags = schemas[schemaId.replace(/\W/g, '')]; // 'yaml-1.1' -> 'yaml11' + + if (!tags) { + const keys = Object.keys(schemas).map(key => JSON.stringify(key)).join(', '); + throw new Error(`Unknown schema "${schemaId}"; use one of ${keys}`); + } + + if (Array.isArray(customTags)) { + for (const tag of customTags) tags = tags.concat(tag); + } else if (typeof customTags === 'function') { + tags = customTags(tags.slice()); + } + + for (let i = 0; i < tags.length; ++i) { + const tag = tags[i]; + + if (typeof tag === 'string') { + const tagObj = knownTags[tag]; + + if (!tagObj) { + const keys = Object.keys(knownTags).map(key => JSON.stringify(key)).join(', '); + throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); + } + + tags[i] = tagObj; + } + } + + return tags; +} + +const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; + +class Schema { + // TODO: remove in v2 + // TODO: remove in v2 + constructor({ + customTags, + merge, + schema, + sortMapEntries, + tags: deprecatedCustomTags + }) { + this.merge = !!merge; + this.name = schema; + this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null; + if (!customTags && deprecatedCustomTags) warnings.warnOptionDeprecation('tags', 'customTags'); + this.tags = getSchemaTags(schemas, tags, customTags || deprecatedCustomTags, schema); + } + + createNode(value, wrapScalars, tagName, ctx) { + const baseCtx = { + defaultPrefix: Schema.defaultPrefix, + schema: this, + wrapScalars + }; + const createCtx = ctx ? Object.assign(ctx, baseCtx) : baseCtx; + return createNode(value, tagName, createCtx); + } + + createPair(key, value, ctx) { + if (!ctx) ctx = { + wrapScalars: true + }; + const k = this.createNode(key, ctx.wrapScalars, null, ctx); + const v = this.createNode(value, ctx.wrapScalars, null, ctx); + return new resolveSeq.Pair(k, v); + } + +} + +PlainValue._defineProperty(Schema, "defaultPrefix", PlainValue.defaultTagPrefix); + +PlainValue._defineProperty(Schema, "defaultTags", PlainValue.defaultTags); + +exports.Schema = Schema; + + +/***/ }), + +/***/ 65: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var PlainValue = __nccwpck_require__(215); +var parseCst = __nccwpck_require__(445); +__nccwpck_require__(140); +var Document$1 = __nccwpck_require__(983); +var Schema = __nccwpck_require__(656); +var warnings = __nccwpck_require__(383); + +function createNode(value, wrapScalars = true, tag) { + if (tag === undefined && typeof wrapScalars === 'string') { + tag = wrapScalars; + wrapScalars = true; + } + + const options = Object.assign({}, Document$1.Document.defaults[Document$1.defaultOptions.version], Document$1.defaultOptions); + const schema = new Schema.Schema(options); + return schema.createNode(value, wrapScalars, tag); +} + +class Document extends Document$1.Document { + constructor(options) { + super(Object.assign({}, Document$1.defaultOptions, options)); + } + +} + +function parseAllDocuments(src, options) { + const stream = []; + let prev; + + for (const cstDoc of parseCst.parse(src)) { + const doc = new Document(options); + doc.parse(cstDoc, prev); + stream.push(doc); + prev = doc; + } + + return stream; +} + +function parseDocument(src, options) { + const cst = parseCst.parse(src); + const doc = new 
Document(options).parse(cst[0]); + + if (cst.length > 1) { + const errMsg = 'Source contains multiple documents; please use YAML.parseAllDocuments()'; + doc.errors.unshift(new PlainValue.YAMLSemanticError(cst[1], errMsg)); + } + + return doc; +} + +function parse(src, options) { + const doc = parseDocument(src, options); + doc.warnings.forEach(warning => warnings.warn(warning)); + if (doc.errors.length > 0) throw doc.errors[0]; + return doc.toJSON(); +} + +function stringify(value, options) { + const doc = new Document(options); + doc.contents = value; + return String(doc); +} + +const YAML = { + createNode, + defaultOptions: Document$1.defaultOptions, + Document, + parse, + parseAllDocuments, + parseCST: parseCst.parse, + parseDocument, + scalarOptions: Document$1.scalarOptions, + stringify +}; + +exports.YAML = YAML; + + +/***/ }), + +/***/ 445: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var PlainValue = __nccwpck_require__(215); + +class BlankLine extends PlainValue.Node { + constructor() { + super(PlainValue.Type.BLANK_LINE); + } + /* istanbul ignore next */ + + + get includesTrailingLines() { + // This is never called from anywhere, but if it were, + // this is the value it should return. + return true; + } + /** + * Parses a blank line from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first \n character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + this.range = new PlainValue.Range(start, start + 1); + return start + 1; + } + +} + +class CollectionItem extends PlainValue.Node { + constructor(type, props) { + super(type, props); + this.node = null; + } + + get includesTrailingLines() { + return !!this.node && this.node.includesTrailingLines; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + const { + parseNode, + src + } = context; + let { + atLineStart, + lineStart + } = context; + if (!atLineStart && this.type === PlainValue.Type.SEQ_ITEM) this.error = new PlainValue.YAMLSemanticError(this, 'Sequence items must not have preceding content on the same line'); + const indent = atLineStart ? start - lineStart : context.indent; + let offset = PlainValue.Node.endOfWhiteSpace(src, start + 1); + let ch = src[offset]; + const inlineComment = ch === '#'; + const comments = []; + let blankLine = null; + + while (ch === '\n' || ch === '#') { + if (ch === '#') { + const end = PlainValue.Node.endOfLine(src, offset + 1); + comments.push(new PlainValue.Range(offset, end)); + offset = end; + } else { + atLineStart = true; + lineStart = offset + 1; + const wsEnd = PlainValue.Node.endOfWhiteSpace(src, lineStart); + + if (src[wsEnd] === '\n' && comments.length === 0) { + blankLine = new BlankLine(); + lineStart = blankLine.parse({ + src + }, lineStart); + } + + offset = PlainValue.Node.endOfIndent(src, lineStart); + } + + ch = src[offset]; + } + + if (PlainValue.Node.nextNodeIsIndented(ch, offset - (lineStart + indent), this.type !== PlainValue.Type.SEQ_ITEM)) { + this.node = parseNode({ + atLineStart, + inCollection: false, + indent, + lineStart, + parent: this + }, offset); + } else if (ch && lineStart > start + 1) { + offset = lineStart - 1; + } + + if (this.node) { + if (blankLine) { + // Only blank lines preceding non-empty nodes are captured. 
Note that + // this means that collection item range start indices do not always + // increase monotonically. -- eemeli/yaml#126 + const items = context.parent.items || context.parent.contents; + if (items) items.push(blankLine); + } + + if (comments.length) Array.prototype.push.apply(this.props, comments); + offset = this.node.range.end; + } else { + if (inlineComment) { + const c = comments[0]; + this.props.push(c); + offset = c.end; + } else { + offset = PlainValue.Node.endOfLine(src, start + 1); + } + } + + const end = this.node ? this.node.valueRange.end : offset; + this.valueRange = new PlainValue.Range(start, end); + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + return this.node ? this.node.setOrigRanges(cr, offset) : offset; + } + + toString() { + const { + context: { + src + }, + node, + range, + value + } = this; + if (value != null) return value; + const str = node ? src.slice(range.start, node.range.start) + String(node) : src.slice(range.start, range.end); + return PlainValue.Node.addStringTerminator(src, range.end, str); + } + +} + +class Comment extends PlainValue.Node { + constructor() { + super(PlainValue.Type.COMMENT); + } + /** + * Parses a comment line from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + parse(context, start) { + this.context = context; + const offset = this.parseComment(start); + this.range = new PlainValue.Range(start, offset); + return offset; + } + +} + +function grabCollectionEndComments(node) { + let cnode = node; + + while (cnode instanceof CollectionItem) cnode = cnode.node; + + if (!(cnode instanceof Collection)) return null; + const len = cnode.items.length; + let ci = -1; + + for (let i = len - 1; i >= 0; --i) { + const n = cnode.items[i]; + + if (n.type === PlainValue.Type.COMMENT) { + // Keep sufficiently indented comments with preceding node + const { + indent, + lineStart + } = n.context; + if (indent > 0 && n.range.start >= lineStart + indent) break; + ci = i; + } else if (n.type === PlainValue.Type.BLANK_LINE) ci = i;else break; + } + + if (ci === -1) return null; + const ca = cnode.items.splice(ci, len - ci); + const prevEnd = ca[0].range.start; + + while (true) { + cnode.range.end = prevEnd; + if (cnode.valueRange && cnode.valueRange.end > prevEnd) cnode.valueRange.end = prevEnd; + if (cnode === node) break; + cnode = cnode.context.parent; + } + + return ca; +} +class Collection extends PlainValue.Node { + static nextContentHasIndent(src, offset, indent) { + const lineStart = PlainValue.Node.endOfLine(src, offset) + 1; + offset = PlainValue.Node.endOfWhiteSpace(src, lineStart); + const ch = src[offset]; + if (!ch) return false; + if (offset >= lineStart + indent) return true; + if (ch !== '#' && ch !== '\n') return false; + return Collection.nextContentHasIndent(src, offset, indent); + } + + constructor(firstItem) { + super(firstItem.type === PlainValue.Type.SEQ_ITEM ? 
PlainValue.Type.SEQ : PlainValue.Type.MAP); + + for (let i = firstItem.props.length - 1; i >= 0; --i) { + if (firstItem.props[i].start < firstItem.context.lineStart) { + // props on previous line are assumed by the collection + this.props = firstItem.props.slice(0, i + 1); + firstItem.props = firstItem.props.slice(i + 1); + const itemRange = firstItem.props[0] || firstItem.valueRange; + firstItem.range.start = itemRange.start; + break; + } + } + + this.items = [firstItem]; + const ec = grabCollectionEndComments(firstItem); + if (ec) Array.prototype.push.apply(this.items, ec); + } + + get includesTrailingLines() { + return this.items.length > 0; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + const { + parseNode, + src + } = context; // It's easier to recalculate lineStart here rather than tracking down the + // last context from which to read it -- eemeli/yaml#2 + + let lineStart = PlainValue.Node.startOfLine(src, start); + const firstItem = this.items[0]; // First-item context needs to be correct for later comment handling + // -- eemeli/yaml#17 + + firstItem.context.parent = this; + this.valueRange = PlainValue.Range.copy(firstItem.valueRange); + const indent = firstItem.range.start - firstItem.context.lineStart; + let offset = start; + offset = PlainValue.Node.normalizeOffset(src, offset); + let ch = src[offset]; + let atLineStart = PlainValue.Node.endOfWhiteSpace(src, lineStart) === offset; + let prevIncludesTrailingLines = false; + + while (ch) { + while (ch === '\n' || ch === '#') { + if (atLineStart && ch === '\n' && !prevIncludesTrailingLines) { + const blankLine = new BlankLine(); + offset = blankLine.parse({ + src + }, offset); + this.valueRange.end = offset; + + if (offset >= src.length) { + ch = null; + break; + } + + this.items.push(blankLine); + offset -= 1; // blankLine.parse() consumes terminal newline + } else if (ch === '#') { + if (offset < lineStart + indent && !Collection.nextContentHasIndent(src, offset, indent)) { + return offset; + } + + const comment = new Comment(); + offset = comment.parse({ + indent, + lineStart, + src + }, offset); + this.items.push(comment); + this.valueRange.end = offset; + + if (offset >= src.length) { + ch = null; + break; + } + } + + lineStart = offset + 1; + offset = PlainValue.Node.endOfIndent(src, lineStart); + + if (PlainValue.Node.atBlank(src, offset)) { + const wsEnd = PlainValue.Node.endOfWhiteSpace(src, offset); + const next = src[wsEnd]; + + if (!next || next === '\n' || next === '#') { + offset = wsEnd; + } + } + + ch = src[offset]; + atLineStart = true; + } + + if (!ch) { + break; + } + + if (offset !== lineStart + indent && (atLineStart || ch !== ':')) { + if (offset < lineStart + indent) { + if (lineStart > start) offset = lineStart; + break; + } else if (!this.error) { + const msg = 'All collection items must start at the same column'; + this.error = new PlainValue.YAMLSyntaxError(this, msg); + } + } + + if (firstItem.type === PlainValue.Type.SEQ_ITEM) { + if (ch !== '-') { + if (lineStart > start) offset = lineStart; + break; + } + } else if (ch === '-' && !this.error) { + // map key may start with -, as long as it's followed by a non-whitespace char + const next = src[offset + 1]; + + if (!next || next === '\n' || next === '\t' || next === ' ') { + const msg = 'A collection cannot be both a mapping and a sequence'; + this.error = new 
PlainValue.YAMLSyntaxError(this, msg); + } + } + + const node = parseNode({ + atLineStart, + inCollection: true, + indent, + lineStart, + parent: this + }, offset); + if (!node) return offset; // at next document start + + this.items.push(node); + this.valueRange.end = node.valueRange.end; + offset = PlainValue.Node.normalizeOffset(src, node.range.end); + ch = src[offset]; + atLineStart = false; + prevIncludesTrailingLines = node.includesTrailingLines; // Need to reset lineStart and atLineStart here if preceding node's range + // has advanced to check the current line's indentation level + // -- eemeli/yaml#10 & eemeli/yaml#38 + + if (ch) { + let ls = offset - 1; + let prev = src[ls]; + + while (prev === ' ' || prev === '\t') prev = src[--ls]; + + if (prev === '\n') { + lineStart = ls + 1; + atLineStart = true; + } + } + + const ec = grabCollectionEndComments(node); + if (ec) Array.prototype.push.apply(this.items, ec); + } + + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + this.items.forEach(node => { + offset = node.setOrigRanges(cr, offset); + }); + return offset; + } + + toString() { + const { + context: { + src + }, + items, + range, + value + } = this; + if (value != null) return value; + let str = src.slice(range.start, items[0].range.start) + String(items[0]); + + for (let i = 1; i < items.length; ++i) { + const item = items[i]; + const { + atLineStart, + indent + } = item.context; + if (atLineStart) for (let i = 0; i < indent; ++i) str += ' '; + str += String(item); + } + + return PlainValue.Node.addStringTerminator(src, range.end, str); + } + +} + +class Directive extends PlainValue.Node { + constructor() { + super(PlainValue.Type.DIRECTIVE); + this.name = null; + } + + get parameters() { + const raw = this.rawValue; + return raw ? raw.trim().split(/[ \t]+/) : []; + } + + parseName(start) { + const { + src + } = this.context; + let offset = start; + let ch = src[offset]; + + while (ch && ch !== '\n' && ch !== '\t' && ch !== ' ') ch = src[offset += 1]; + + this.name = src.slice(start, offset); + return offset; + } + + parseParameters(start) { + const { + src + } = this.context; + let offset = start; + let ch = src[offset]; + + while (ch && ch !== '\n' && ch !== '#') ch = src[offset += 1]; + + this.valueRange = new PlainValue.Range(start, offset); + return offset; + } + + parse(context, start) { + this.context = context; + let offset = this.parseName(start + 1); + offset = this.parseParameters(offset); + offset = this.parseComment(offset); + this.range = new PlainValue.Range(start, offset); + return offset; + } + +} + +class Document extends PlainValue.Node { + static startCommentOrEndBlankLine(src, start) { + const offset = PlainValue.Node.endOfWhiteSpace(src, start); + const ch = src[offset]; + return ch === '#' || ch === '\n' ? 
offset : start; + } + + constructor() { + super(PlainValue.Type.DOCUMENT); + this.directives = null; + this.contents = null; + this.directivesEndMarker = null; + this.documentEndMarker = null; + } + + parseDirectives(start) { + const { + src + } = this.context; + this.directives = []; + let atLineStart = true; + let hasDirectives = false; + let offset = start; + + while (!PlainValue.Node.atDocumentBoundary(src, offset, PlainValue.Char.DIRECTIVES_END)) { + offset = Document.startCommentOrEndBlankLine(src, offset); + + switch (src[offset]) { + case '\n': + if (atLineStart) { + const blankLine = new BlankLine(); + offset = blankLine.parse({ + src + }, offset); + + if (offset < src.length) { + this.directives.push(blankLine); + } + } else { + offset += 1; + atLineStart = true; + } + + break; + + case '#': + { + const comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.directives.push(comment); + atLineStart = false; + } + break; + + case '%': + { + const directive = new Directive(); + offset = directive.parse({ + parent: this, + src + }, offset); + this.directives.push(directive); + hasDirectives = true; + atLineStart = false; + } + break; + + default: + if (hasDirectives) { + this.error = new PlainValue.YAMLSemanticError(this, 'Missing directives-end indicator line'); + } else if (this.directives.length > 0) { + this.contents = this.directives; + this.directives = []; + } + + return offset; + } + } + + if (src[offset]) { + this.directivesEndMarker = new PlainValue.Range(offset, offset + 3); + return offset + 3; + } + + if (hasDirectives) { + this.error = new PlainValue.YAMLSemanticError(this, 'Missing directives-end indicator line'); + } else if (this.directives.length > 0) { + this.contents = this.directives; + this.directives = []; + } + + return offset; + } + + parseContents(start) { + const { + parseNode, + src + } = this.context; + if (!this.contents) this.contents = []; + let lineStart = start; + + while (src[lineStart - 1] === '-') lineStart -= 1; + + let offset = PlainValue.Node.endOfWhiteSpace(src, start); + let atLineStart = lineStart === start; + this.valueRange = new PlainValue.Range(offset); + + while (!PlainValue.Node.atDocumentBoundary(src, offset, PlainValue.Char.DOCUMENT_END)) { + switch (src[offset]) { + case '\n': + if (atLineStart) { + const blankLine = new BlankLine(); + offset = blankLine.parse({ + src + }, offset); + + if (offset < src.length) { + this.contents.push(blankLine); + } + } else { + offset += 1; + atLineStart = true; + } + + lineStart = offset; + break; + + case '#': + { + const comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.contents.push(comment); + atLineStart = false; + } + break; + + default: + { + const iEnd = PlainValue.Node.endOfIndent(src, offset); + const context = { + atLineStart, + indent: -1, + inFlow: false, + inCollection: false, + lineStart, + parent: this + }; + const node = parseNode(context, iEnd); + if (!node) return this.valueRange.end = iEnd; // at next document start + + this.contents.push(node); + offset = node.range.end; + atLineStart = false; + const ec = grabCollectionEndComments(node); + if (ec) Array.prototype.push.apply(this.contents, ec); + } + } + + offset = Document.startCommentOrEndBlankLine(src, offset); + } + + this.valueRange.end = offset; + + if (src[offset]) { + this.documentEndMarker = new PlainValue.Range(offset, offset + 3); + offset += 3; + + if (src[offset]) { + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + + if (src[offset] === '#') { + const 
comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.contents.push(comment); + } + + switch (src[offset]) { + case '\n': + offset += 1; + break; + + case undefined: + break; + + default: + this.error = new PlainValue.YAMLSyntaxError(this, 'Document end marker line cannot have a non-comment suffix'); + } + } + } + + return offset; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + context.root = this; + this.context = context; + const { + src + } = context; + let offset = src.charCodeAt(start) === 0xfeff ? start + 1 : start; // skip BOM + + offset = this.parseDirectives(offset); + offset = this.parseContents(offset); + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + this.directives.forEach(node => { + offset = node.setOrigRanges(cr, offset); + }); + if (this.directivesEndMarker) offset = this.directivesEndMarker.setOrigRange(cr, offset); + this.contents.forEach(node => { + offset = node.setOrigRanges(cr, offset); + }); + if (this.documentEndMarker) offset = this.documentEndMarker.setOrigRange(cr, offset); + return offset; + } + + toString() { + const { + contents, + directives, + value + } = this; + if (value != null) return value; + let str = directives.join(''); + + if (contents.length > 0) { + if (directives.length > 0 || contents[0].type === PlainValue.Type.COMMENT) str += '---\n'; + str += contents.join(''); + } + + if (str[str.length - 1] !== '\n') str += '\n'; + return str; + } + +} + +class Alias extends PlainValue.Node { + /** + * Parses an *alias from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = PlainValue.Node.endOfIdentifier(src, start + 1); + this.valueRange = new PlainValue.Range(start + 1, offset); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + +} + +const Chomp = { + CLIP: 'CLIP', + KEEP: 'KEEP', + STRIP: 'STRIP' +}; +class BlockValue extends PlainValue.Node { + constructor(type, props) { + super(type, props); + this.blockIndent = null; + this.chomping = Chomp.CLIP; + this.header = null; + } + + get includesTrailingLines() { + return this.chomping === Chomp.KEEP; + } + + get strValue() { + if (!this.valueRange || !this.context) return null; + let { + start, + end + } = this.valueRange; + const { + indent, + src + } = this.context; + if (this.valueRange.isEmpty()) return ''; + let lastNewLine = null; + let ch = src[end - 1]; + + while (ch === '\n' || ch === '\t' || ch === ' ') { + end -= 1; + + if (end <= start) { + if (this.chomping === Chomp.KEEP) break;else return ''; // probably never happens + } + + if (ch === '\n') lastNewLine = end; + ch = src[end - 1]; + } + + let keepStart = end + 1; + + if (lastNewLine) { + if (this.chomping === Chomp.KEEP) { + keepStart = lastNewLine; + end = this.valueRange.end; + } else { + end = lastNewLine; + } + } + + const bi = indent + this.blockIndent; + const folded = this.type === PlainValue.Type.BLOCK_FOLDED; + let atStart = true; + let str = ''; + let sep = ''; + let prevMoreIndented = false; + + for (let i = start; i < end; ++i) { + for (let j = 0; j < bi; ++j) { + if (src[i] !== ' ') break; + i += 1; + } + + const ch = src[i]; + + 
if (ch === '\n') { + if (sep === '\n') str += '\n';else sep = '\n'; + } else { + const lineEnd = PlainValue.Node.endOfLine(src, i); + const line = src.slice(i, lineEnd); + i = lineEnd; + + if (folded && (ch === ' ' || ch === '\t') && i < keepStart) { + if (sep === ' ') sep = '\n';else if (!prevMoreIndented && !atStart && sep === '\n') sep = '\n\n'; + str += sep + line; //+ ((lineEnd < end && src[lineEnd]) || '') + + sep = lineEnd < end && src[lineEnd] || ''; + prevMoreIndented = true; + } else { + str += sep + line; + sep = folded && i < keepStart ? ' ' : '\n'; + prevMoreIndented = false; + } + + if (atStart && line !== '') atStart = false; + } + } + + return this.chomping === Chomp.STRIP ? str : str + '\n'; + } + + parseBlockHeader(start) { + const { + src + } = this.context; + let offset = start + 1; + let bi = ''; + + while (true) { + const ch = src[offset]; + + switch (ch) { + case '-': + this.chomping = Chomp.STRIP; + break; + + case '+': + this.chomping = Chomp.KEEP; + break; + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + bi += ch; + break; + + default: + this.blockIndent = Number(bi) || null; + this.header = new PlainValue.Range(start, offset); + return offset; + } + + offset += 1; + } + } + + parseBlockValue(start) { + const { + indent, + src + } = this.context; + const explicit = !!this.blockIndent; + let offset = start; + let valueEnd = start; + let minBlockIndent = 1; + + for (let ch = src[offset]; ch === '\n'; ch = src[offset]) { + offset += 1; + if (PlainValue.Node.atDocumentBoundary(src, offset)) break; + const end = PlainValue.Node.endOfBlockIndent(src, indent, offset); // should not include tab? + + if (end === null) break; + const ch = src[end]; + const lineIndent = end - (offset + indent); + + if (!this.blockIndent) { + // no explicit block indent, none yet detected + if (src[end] !== '\n') { + // first line with non-whitespace content + if (lineIndent < minBlockIndent) { + const msg = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; + this.error = new PlainValue.YAMLSemanticError(this, msg); + } + + this.blockIndent = lineIndent; + } else if (lineIndent > minBlockIndent) { + // empty line with more whitespace + minBlockIndent = lineIndent; + } + } else if (ch && ch !== '\n' && lineIndent < this.blockIndent) { + if (src[end] === '#') break; + + if (!this.error) { + const src = explicit ? 'explicit indentation indicator' : 'first line'; + const msg = `Block scalars must not be less indented than their ${src}`; + this.error = new PlainValue.YAMLSemanticError(this, msg); + } + } + + if (src[end] === '\n') { + offset = end; + } else { + offset = valueEnd = PlainValue.Node.endOfLine(src, end); + } + } + + if (this.chomping !== Chomp.KEEP) { + offset = src[valueEnd] ? valueEnd + 1 : valueEnd; + } + + this.valueRange = new PlainValue.Range(start + 1, offset); + return offset; + } + /** + * Parses a block value from the source + * + * Accepted forms are: + * ``` + * BS + * block + * lines + * + * BS #comment + * block + * lines + * ``` + * where the block style BS matches the regexp `[|>][-+1-9]*` and block lines + * are empty or have an indent level greater than `indent`. 
+ * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this block + */ + + + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = this.parseBlockHeader(start); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + offset = this.parseBlockValue(offset); + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + return this.header ? this.header.setOrigRange(cr, offset) : offset; + } + +} + +class FlowCollection extends PlainValue.Node { + constructor(type, props) { + super(type, props); + this.items = null; + } + + prevNodeIsJsonLike(idx = this.items.length) { + const node = this.items[idx - 1]; + return !!node && (node.jsonLike || node.type === PlainValue.Type.COMMENT && this.prevNodeIsJsonLike(idx - 1)); + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + const { + parseNode, + src + } = context; + let { + indent, + lineStart + } = context; + let char = src[start]; // { or [ + + this.items = [{ + char, + offset: start + }]; + let offset = PlainValue.Node.endOfWhiteSpace(src, start + 1); + char = src[offset]; + + while (char && char !== ']' && char !== '}') { + switch (char) { + case '\n': + { + lineStart = offset + 1; + const wsEnd = PlainValue.Node.endOfWhiteSpace(src, lineStart); + + if (src[wsEnd] === '\n') { + const blankLine = new BlankLine(); + lineStart = blankLine.parse({ + src + }, lineStart); + this.items.push(blankLine); + } + + offset = PlainValue.Node.endOfIndent(src, lineStart); + + if (offset <= lineStart + indent) { + char = src[offset]; + + if (offset < lineStart + indent || char !== ']' && char !== '}') { + const msg = 'Insufficient indentation in flow collection'; + this.error = new PlainValue.YAMLSemanticError(this, msg); + } + } + } + break; + + case ',': + { + this.items.push({ + char, + offset + }); + offset += 1; + } + break; + + case '#': + { + const comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.items.push(comment); + } + break; + + case '?': + case ':': + { + const next = src[offset + 1]; + + if (next === '\n' || next === '\t' || next === ' ' || next === ',' || // in-flow : after JSON-like key does not need to be followed by whitespace + char === ':' && this.prevNodeIsJsonLike()) { + this.items.push({ + char, + offset + }); + offset += 1; + break; + } + } + // fallthrough + + default: + { + const node = parseNode({ + atLineStart: false, + inCollection: false, + inFlow: true, + indent: -1, + lineStart, + parent: this + }, offset); + + if (!node) { + // at next document start + this.valueRange = new PlainValue.Range(start, offset); + return offset; + } + + this.items.push(node); + offset = PlainValue.Node.normalizeOffset(src, node.range.end); + } + } + + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + char = src[offset]; + } + + this.valueRange = new PlainValue.Range(start, offset + 1); + + if (char) { + this.items.push({ + char, + offset + }); + offset = PlainValue.Node.endOfWhiteSpace(src, offset + 1); + offset = this.parseComment(offset); + } + + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + this.items.forEach(node => { + if (node instanceof PlainValue.Node) { + offset = node.setOrigRanges(cr, 
offset); + } else if (cr.length === 0) { + node.origOffset = node.offset; + } else { + let i = offset; + + while (i < cr.length) { + if (cr[i] > node.offset) break;else ++i; + } + + node.origOffset = node.offset + i; + offset = i; + } + }); + return offset; + } + + toString() { + const { + context: { + src + }, + items, + range, + value + } = this; + if (value != null) return value; + const nodes = items.filter(item => item instanceof PlainValue.Node); + let str = ''; + let prevEnd = range.start; + nodes.forEach(node => { + const prefix = src.slice(prevEnd, node.range.start); + prevEnd = node.range.end; + str += prefix + String(node); + + if (str[str.length - 1] === '\n' && src[prevEnd - 1] !== '\n' && src[prevEnd] === '\n') { + // Comment range does not include the terminal newline, but its + // stringified value does. Without this fix, newlines at comment ends + // get duplicated. + prevEnd += 1; + } + }); + str += src.slice(prevEnd, range.end); + return PlainValue.Node.addStringTerminator(src, range.end, str); + } + +} + +class QuoteDouble extends PlainValue.Node { + static endOfQuote(src, offset) { + let ch = src[offset]; + + while (ch && ch !== '"') { + offset += ch === '\\' ? 2 : 1; + ch = src[offset]; + } + + return offset + 1; + } + /** + * @returns {string | { str: string, errors: YAMLSyntaxError[] }} + */ + + + get strValue() { + if (!this.valueRange || !this.context) return null; + const errors = []; + const { + start, + end + } = this.valueRange; + const { + indent, + src + } = this.context; + if (src[end - 1] !== '"') errors.push(new PlainValue.YAMLSyntaxError(this, 'Missing closing "quote')); // Using String#replace is too painful with escaped newlines preceded by + // escaped backslashes; also, this should be faster. + + let str = ''; + + for (let i = start + 1; i < end - 1; ++i) { + const ch = src[i]; + + if (ch === '\n') { + if (PlainValue.Node.atDocumentBoundary(src, i + 1)) errors.push(new PlainValue.YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); + const { + fold, + offset, + error + } = PlainValue.Node.foldNewline(src, i, indent); + str += fold; + i = offset; + if (error) errors.push(new PlainValue.YAMLSemanticError(this, 'Multi-line double-quoted string needs to be sufficiently indented')); + } else if (ch === '\\') { + i += 1; + + switch (src[i]) { + case '0': + str += '\0'; + break; + // null character + + case 'a': + str += '\x07'; + break; + // bell character + + case 'b': + str += '\b'; + break; + // backspace + + case 'e': + str += '\x1b'; + break; + // escape character + + case 'f': + str += '\f'; + break; + // form feed + + case 'n': + str += '\n'; + break; + // line feed + + case 'r': + str += '\r'; + break; + // carriage return + + case 't': + str += '\t'; + break; + // horizontal tab + + case 'v': + str += '\v'; + break; + // vertical tab + + case 'N': + str += '\u0085'; + break; + // Unicode next line + + case '_': + str += '\u00a0'; + break; + // Unicode non-breaking space + + case 'L': + str += '\u2028'; + break; + // Unicode line separator + + case 'P': + str += '\u2029'; + break; + // Unicode paragraph separator + + case ' ': + str += ' '; + break; + + case '"': + str += '"'; + break; + + case '/': + str += '/'; + break; + + case '\\': + str += '\\'; + break; + + case '\t': + str += '\t'; + break; + + case 'x': + str += this.parseCharCode(i + 1, 2, errors); + i += 2; + break; + + case 'u': + str += this.parseCharCode(i + 1, 4, errors); + i += 4; + break; + + case 'U': + str += this.parseCharCode(i + 1, 8, 
errors); + i += 8; + break; + + case '\n': + // skip escaped newlines, but still trim the following line + while (src[i + 1] === ' ' || src[i + 1] === '\t') i += 1; + + break; + + default: + errors.push(new PlainValue.YAMLSyntaxError(this, `Invalid escape sequence ${src.substr(i - 1, 2)}`)); + str += '\\' + src[i]; + } + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + const wsStart = i; + let next = src[i + 1]; + + while (next === ' ' || next === '\t') { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + return errors.length > 0 ? { + errors, + str + } : str; + } + + parseCharCode(offset, length, errors) { + const { + src + } = this.context; + const cc = src.substr(offset, length); + const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); + const code = ok ? parseInt(cc, 16) : NaN; + + if (isNaN(code)) { + errors.push(new PlainValue.YAMLSyntaxError(this, `Invalid escape sequence ${src.substr(offset - 2, length + 2)}`)); + return src.substr(offset - 2, length + 2); + } + + return String.fromCodePoint(code); + } + /** + * Parses a "double quoted" value from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = QuoteDouble.endOfQuote(src, start + 1); + this.valueRange = new PlainValue.Range(start, offset); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + +} + +class QuoteSingle extends PlainValue.Node { + static endOfQuote(src, offset) { + let ch = src[offset]; + + while (ch) { + if (ch === "'") { + if (src[offset + 1] !== "'") break; + ch = src[offset += 2]; + } else { + ch = src[offset += 1]; + } + } + + return offset + 1; + } + /** + * @returns {string | { str: string, errors: YAMLSyntaxError[] }} + */ + + + get strValue() { + if (!this.valueRange || !this.context) return null; + const errors = []; + const { + start, + end + } = this.valueRange; + const { + indent, + src + } = this.context; + if (src[end - 1] !== "'") errors.push(new PlainValue.YAMLSyntaxError(this, "Missing closing 'quote")); + let str = ''; + + for (let i = start + 1; i < end - 1; ++i) { + const ch = src[i]; + + if (ch === '\n') { + if (PlainValue.Node.atDocumentBoundary(src, i + 1)) errors.push(new PlainValue.YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); + const { + fold, + offset, + error + } = PlainValue.Node.foldNewline(src, i, indent); + str += fold; + i = offset; + if (error) errors.push(new PlainValue.YAMLSemanticError(this, 'Multi-line single-quoted string needs to be sufficiently indented')); + } else if (ch === "'") { + str += ch; + i += 1; + if (src[i] !== "'") errors.push(new PlainValue.YAMLSyntaxError(this, 'Unescaped single quote? This should not happen.')); + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + const wsStart = i; + let next = src[i + 1]; + + while (next === ' ' || next === '\t') { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + return errors.length > 0 ? 
{ + errors, + str + } : str; + } + /** + * Parses a 'single quoted' value from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = QuoteSingle.endOfQuote(src, start + 1); + this.valueRange = new PlainValue.Range(start, offset); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + +} + +function createNewNode(type, props) { + switch (type) { + case PlainValue.Type.ALIAS: + return new Alias(type, props); + + case PlainValue.Type.BLOCK_FOLDED: + case PlainValue.Type.BLOCK_LITERAL: + return new BlockValue(type, props); + + case PlainValue.Type.FLOW_MAP: + case PlainValue.Type.FLOW_SEQ: + return new FlowCollection(type, props); + + case PlainValue.Type.MAP_KEY: + case PlainValue.Type.MAP_VALUE: + case PlainValue.Type.SEQ_ITEM: + return new CollectionItem(type, props); + + case PlainValue.Type.COMMENT: + case PlainValue.Type.PLAIN: + return new PlainValue.PlainValue(type, props); + + case PlainValue.Type.QUOTE_DOUBLE: + return new QuoteDouble(type, props); + + case PlainValue.Type.QUOTE_SINGLE: + return new QuoteSingle(type, props); + + /* istanbul ignore next */ + + default: + return null; + // should never happen + } +} +/** + * @param {boolean} atLineStart - Node starts at beginning of line + * @param {boolean} inFlow - true if currently in a flow context + * @param {boolean} inCollection - true if currently in a collection context + * @param {number} indent - Current level of indentation + * @param {number} lineStart - Start of the current line + * @param {Node} parent - The parent of the node + * @param {string} src - Source of the YAML document + */ + + +class ParseContext { + static parseType(src, offset, inFlow) { + switch (src[offset]) { + case '*': + return PlainValue.Type.ALIAS; + + case '>': + return PlainValue.Type.BLOCK_FOLDED; + + case '|': + return PlainValue.Type.BLOCK_LITERAL; + + case '{': + return PlainValue.Type.FLOW_MAP; + + case '[': + return PlainValue.Type.FLOW_SEQ; + + case '?': + return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.MAP_KEY : PlainValue.Type.PLAIN; + + case ':': + return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.MAP_VALUE : PlainValue.Type.PLAIN; + + case '-': + return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.SEQ_ITEM : PlainValue.Type.PLAIN; + + case '"': + return PlainValue.Type.QUOTE_DOUBLE; + + case "'": + return PlainValue.Type.QUOTE_SINGLE; + + default: + return PlainValue.Type.PLAIN; + } + } + + constructor(orig = {}, { + atLineStart, + inCollection, + inFlow, + indent, + lineStart, + parent + } = {}) { + PlainValue._defineProperty(this, "parseNode", (overlay, start) => { + if (PlainValue.Node.atDocumentBoundary(this.src, start)) return null; + const context = new ParseContext(this, overlay); + const { + props, + type, + valueStart + } = context.parseProps(start); + const node = createNewNode(type, props); + let offset = node.parse(context, valueStart); + node.range = new PlainValue.Range(start, offset); + /* istanbul ignore if */ + + if (offset <= start) { + // This should never happen, but if it does, let's make sure to at least + // step one character forward to avoid a busy loop. 
+ node.error = new Error(`Node#parse consumed no characters`); + node.error.parseEnd = offset; + node.error.source = node; + node.range.end = start + 1; + } + + if (context.nodeStartsCollection(node)) { + if (!node.error && !context.atLineStart && context.parent.type === PlainValue.Type.DOCUMENT) { + node.error = new PlainValue.YAMLSyntaxError(node, 'Block collection must not have preceding content here (e.g. directives-end indicator)'); + } + + const collection = new Collection(node); + offset = collection.parse(new ParseContext(context), offset); + collection.range = new PlainValue.Range(start, offset); + return collection; + } + + return node; + }); + + this.atLineStart = atLineStart != null ? atLineStart : orig.atLineStart || false; + this.inCollection = inCollection != null ? inCollection : orig.inCollection || false; + this.inFlow = inFlow != null ? inFlow : orig.inFlow || false; + this.indent = indent != null ? indent : orig.indent; + this.lineStart = lineStart != null ? lineStart : orig.lineStart; + this.parent = parent != null ? parent : orig.parent || {}; + this.root = orig.root; + this.src = orig.src; + } + + nodeStartsCollection(node) { + const { + inCollection, + inFlow, + src + } = this; + if (inCollection || inFlow) return false; + if (node instanceof CollectionItem) return true; // check for implicit key + + let offset = node.range.end; + if (src[offset] === '\n' || src[offset - 1] === '\n') return false; + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + return src[offset] === ':'; + } // Anchor and tag are before type, which determines the node implementation + // class; hence this intermediate step. + + + parseProps(offset) { + const { + inFlow, + parent, + src + } = this; + const props = []; + let lineHasProps = false; + offset = this.atLineStart ? PlainValue.Node.endOfIndent(src, offset) : PlainValue.Node.endOfWhiteSpace(src, offset); + let ch = src[offset]; + + while (ch === PlainValue.Char.ANCHOR || ch === PlainValue.Char.COMMENT || ch === PlainValue.Char.TAG || ch === '\n') { + if (ch === '\n') { + const lineStart = offset + 1; + const inEnd = PlainValue.Node.endOfIndent(src, lineStart); + const indentDiff = inEnd - (lineStart + this.indent); + const noIndicatorAsIndent = parent.type === PlainValue.Type.SEQ_ITEM && parent.context.atLineStart; + if (!PlainValue.Node.nextNodeIsIndented(src[inEnd], indentDiff, !noIndicatorAsIndent)) break; + this.atLineStart = true; + this.lineStart = lineStart; + lineHasProps = false; + offset = inEnd; + } else if (ch === PlainValue.Char.COMMENT) { + const end = PlainValue.Node.endOfLine(src, offset + 1); + props.push(new PlainValue.Range(offset, end)); + offset = end; + } else { + let end = PlainValue.Node.endOfIdentifier(src, offset + 1); + + if (ch === PlainValue.Char.TAG && src[end] === ',' && /^[a-zA-Z0-9-]+\.[a-zA-Z0-9-]+,\d\d\d\d(-\d\d){0,2}\/\S/.test(src.slice(offset + 1, end + 13))) { + // Let's presume we're dealing with a YAML 1.0 domain tag here, rather + // than an empty but 'foo.bar' private-tagged node in a flow collection + // followed without whitespace by a plain string starting with a year + // or date divided by something. 
+ end = PlainValue.Node.endOfIdentifier(src, end + 5); + } + + props.push(new PlainValue.Range(offset, end)); + lineHasProps = true; + offset = PlainValue.Node.endOfWhiteSpace(src, end); + } + + ch = src[offset]; + } // '- &a : b' has an anchor on an empty node + + + if (lineHasProps && ch === ':' && PlainValue.Node.atBlank(src, offset + 1, true)) offset -= 1; + const type = ParseContext.parseType(src, offset, inFlow); + return { + props, + type, + valueStart: offset + }; + } + /** + * Parses a node from the source + * @param {ParseContext} overlay + * @param {number} start - Index of first non-whitespace character for the node + * @returns {?Node} - null if at a document boundary + */ + + +} + +// Published as 'yaml/parse-cst' +function parse(src) { + const cr = []; + + if (src.indexOf('\r') !== -1) { + src = src.replace(/\r\n?/g, (match, offset) => { + if (match.length > 1) cr.push(offset); + return '\n'; + }); + } + + const documents = []; + let offset = 0; + + do { + const doc = new Document(); + const context = new ParseContext({ + src + }); + offset = doc.parse(context, offset); + documents.push(doc); + } while (offset < src.length); + + documents.setOrigRanges = () => { + if (cr.length === 0) return false; + + for (let i = 1; i < cr.length; ++i) cr[i] -= i; + + let crOffset = 0; + + for (let i = 0; i < documents.length; ++i) { + crOffset = documents[i].setOrigRanges(cr, crOffset); + } + + cr.splice(0, cr.length); + return true; + }; + + documents.toString = () => documents.join('...\n'); + + return documents; +} + +exports.parse = parse; + + +/***/ }), + +/***/ 140: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var PlainValue = __nccwpck_require__(215); + +function addCommentBefore(str, indent, comment) { + if (!comment) return str; + const cc = comment.replace(/[\s\S]^/gm, `$&${indent}#`); + return `#${cc}\n${indent}${str}`; +} +function addComment(str, indent, comment) { + return !comment ? str : comment.indexOf('\n') === -1 ? `${str} #${comment}` : `${str}\n` + comment.replace(/^/gm, `${indent || ''}#`); +} + +class Node {} + +function toJSON(value, arg, ctx) { + if (Array.isArray(value)) return value.map((v, i) => toJSON(v, String(i), ctx)); + + if (value && typeof value.toJSON === 'function') { + const anchor = ctx && ctx.anchors && ctx.anchors.get(value); + if (anchor) ctx.onCreate = res => { + anchor.res = res; + delete ctx.onCreate; + }; + const res = value.toJSON(arg, ctx); + if (anchor && ctx.onCreate) ctx.onCreate(res); + return res; + } + + if ((!ctx || !ctx.keep) && typeof value === 'bigint') return Number(value); + return value; +} + +class Scalar extends Node { + constructor(value) { + super(); + this.value = value; + } + + toJSON(arg, ctx) { + return ctx && ctx.keep ? this.value : toJSON(this.value, arg, ctx); + } + + toString() { + return String(this.value); + } + +} + +function collectionFromPath(schema, path, value) { + let v = value; + + for (let i = path.length - 1; i >= 0; --i) { + const k = path[i]; + const o = Number.isInteger(k) && k >= 0 ? [] : {}; + o[k] = v; + v = o; + } + + return schema.createNode(v, false); +} // null, undefined, or an empty non-string iterable (e.g. 
[]) + + +const isEmptyPath = path => path == null || typeof path === 'object' && path[Symbol.iterator]().next().done; +class Collection extends Node { + constructor(schema) { + super(); + + PlainValue._defineProperty(this, "items", []); + + this.schema = schema; + } + + addIn(path, value) { + if (isEmptyPath(path)) this.add(value);else { + const [key, ...rest] = path; + const node = this.get(key, true); + if (node instanceof Collection) node.addIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + } + + deleteIn([key, ...rest]) { + if (rest.length === 0) return this.delete(key); + const node = this.get(key, true); + if (node instanceof Collection) return node.deleteIn(rest);else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + + getIn([key, ...rest], keepScalar) { + const node = this.get(key, true); + if (rest.length === 0) return !keepScalar && node instanceof Scalar ? node.value : node;else return node instanceof Collection ? node.getIn(rest, keepScalar) : undefined; + } + + hasAllNullValues() { + return this.items.every(node => { + if (!node || node.type !== 'PAIR') return false; + const n = node.value; + return n == null || n instanceof Scalar && n.value == null && !n.commentBefore && !n.comment && !n.tag; + }); + } + + hasIn([key, ...rest]) { + if (rest.length === 0) return this.has(key); + const node = this.get(key, true); + return node instanceof Collection ? node.hasIn(rest) : false; + } + + setIn([key, ...rest], value) { + if (rest.length === 0) { + this.set(key, value); + } else { + const node = this.get(key, true); + if (node instanceof Collection) node.setIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); + } + } // overridden in implementations + + /* istanbul ignore next */ + + + toJSON() { + return null; + } + + toString(ctx, { + blockItem, + flowChars, + isMap, + itemIndent + }, onComment, onChompKeep) { + const { + indent, + indentStep, + stringify + } = ctx; + const inFlow = this.type === PlainValue.Type.FLOW_MAP || this.type === PlainValue.Type.FLOW_SEQ || ctx.inFlow; + if (inFlow) itemIndent += indentStep; + const allNullValues = isMap && this.hasAllNullValues(); + ctx = Object.assign({}, ctx, { + allNullValues, + indent: itemIndent, + inFlow, + type: null + }); + let chompKeep = false; + let hasItemWithNewLine = false; + const nodes = this.items.reduce((nodes, item, i) => { + let comment; + + if (item) { + if (!chompKeep && item.spaceBefore) nodes.push({ + type: 'comment', + str: '' + }); + if (item.commentBefore) item.commentBefore.match(/^.*$/gm).forEach(line => { + nodes.push({ + type: 'comment', + str: `#${line}` + }); + }); + if (item.comment) comment = item.comment; + if (inFlow && (!chompKeep && item.spaceBefore || item.commentBefore || item.comment || item.key && (item.key.commentBefore || item.key.comment) || item.value && (item.value.commentBefore || item.value.comment))) hasItemWithNewLine = true; + } + + chompKeep = false; + let str = stringify(item, ctx, () => comment = null, () => chompKeep = true); + if (inFlow && !hasItemWithNewLine && str.includes('\n')) hasItemWithNewLine = true; + if (inFlow && i < this.items.length - 1) str += ','; + str = addComment(str, itemIndent, comment); + if (chompKeep && (comment || inFlow)) chompKeep = false; + nodes.push({ + type: 'item', + str + }); + return nodes; + }, []); + let str; + + if (nodes.length === 0) { + str = flowChars.start + flowChars.end; + } else if (inFlow) { + const { + start, + end + } = flowChars; + const strings = nodes.map(n => n.str); + + if (hasItemWithNewLine || strings.reduce((sum, str) => sum + str.length + 2, 2) > Collection.maxFlowStringSingleLineLength) { + str = start; + + for (const s of strings) { + str += s ? `\n${indentStep}${indent}${s}` : '\n'; + } + + str += `\n${indent}${end}`; + } else { + str = `${start} ${strings.join(' ')} ${end}`; + } + } else { + const strings = nodes.map(blockItem); + str = strings.shift(); + + for (const s of strings) str += s ? `\n${indent}${s}` : '\n'; + } + + if (this.comment) { + str += '\n' + this.comment.replace(/^/gm, `${indent}#`); + if (onComment) onComment(); + } else if (chompKeep && onChompKeep) onChompKeep(); + + return str; + } + +} + +PlainValue._defineProperty(Collection, "maxFlowStringSingleLineLength", 60); + +function asItemIndex(key) { + let idx = key instanceof Scalar ? key.value : key; + if (idx && typeof idx === 'string') idx = Number(idx); + return Number.isInteger(idx) && idx >= 0 ? idx : null; +} + +class YAMLSeq extends Collection { + add(value) { + this.items.push(value); + } + + delete(key) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') return false; + const del = this.items.splice(idx, 1); + return del.length > 0; + } + + get(key, keepScalar) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') return undefined; + const it = this.items[idx]; + return !keepScalar && it instanceof Scalar ? 
it.value : it; + } + + has(key) { + const idx = asItemIndex(key); + return typeof idx === 'number' && idx < this.items.length; + } + + set(key, value) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') throw new Error(`Expected a valid index, not ${key}.`); + this.items[idx] = value; + } + + toJSON(_, ctx) { + const seq = []; + if (ctx && ctx.onCreate) ctx.onCreate(seq); + let i = 0; + + for (const item of this.items) seq.push(toJSON(item, String(i++), ctx)); + + return seq; + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + return super.toString(ctx, { + blockItem: n => n.type === 'comment' ? n.str : `- ${n.str}`, + flowChars: { + start: '[', + end: ']' + }, + isMap: false, + itemIndent: (ctx.indent || '') + ' ' + }, onComment, onChompKeep); + } + +} + +const stringifyKey = (key, jsKey, ctx) => { + if (jsKey === null) return ''; + if (typeof jsKey !== 'object') return String(jsKey); + if (key instanceof Node && ctx && ctx.doc) return key.toString({ + anchors: {}, + doc: ctx.doc, + indent: '', + indentStep: ctx.indentStep, + inFlow: true, + inStringifyKey: true, + stringify: ctx.stringify + }); + return JSON.stringify(jsKey); +}; + +class Pair extends Node { + constructor(key, value = null) { + super(); + this.key = key; + this.value = value; + this.type = Pair.Type.PAIR; + } + + get commentBefore() { + return this.key instanceof Node ? this.key.commentBefore : undefined; + } + + set commentBefore(cb) { + if (this.key == null) this.key = new Scalar(null); + if (this.key instanceof Node) this.key.commentBefore = cb;else { + const msg = 'Pair.commentBefore is an alias for Pair.key.commentBefore. To set it, the key must be a Node.'; + throw new Error(msg); + } + } + + addToJSMap(ctx, map) { + const key = toJSON(this.key, '', ctx); + + if (map instanceof Map) { + const value = toJSON(this.value, key, ctx); + map.set(key, value); + } else if (map instanceof Set) { + map.add(key); + } else { + const stringKey = stringifyKey(this.key, key, ctx); + map[stringKey] = toJSON(this.value, stringKey, ctx); + } + + return map; + } + + toJSON(_, ctx) { + const pair = ctx && ctx.mapAsMap ? new Map() : {}; + return this.addToJSMap(ctx, pair); + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx || !ctx.doc) return JSON.stringify(this); + const { + indent: indentSize, + indentSeq, + simpleKeys + } = ctx.doc.options; + let { + key, + value + } = this; + let keyComment = key instanceof Node && key.comment; + + if (simpleKeys) { + if (keyComment) { + throw new Error('With simple keys, key nodes cannot have comments'); + } + + if (key instanceof Collection) { + const msg = 'With simple keys, collection cannot be used as a key value'; + throw new Error(msg); + } + } + + const explicitKey = !simpleKeys && (!key || keyComment || key instanceof Collection || key.type === PlainValue.Type.BLOCK_FOLDED || key.type === PlainValue.Type.BLOCK_LITERAL); + const { + doc, + indent, + indentStep, + stringify + } = ctx; + ctx = Object.assign({}, ctx, { + implicitKey: !explicitKey, + indent: indent + indentStep + }); + let chompKeep = false; + let str = stringify(key, ctx, () => keyComment = null, () => chompKeep = true); + str = addComment(str, ctx.indent, keyComment); + + if (ctx.allNullValues && !simpleKeys) { + if (this.comment) { + str = addComment(str, ctx.indent, this.comment); + if (onComment) onComment(); + } else if (chompKeep && !keyComment && onChompKeep) onChompKeep(); + + return ctx.inFlow ? str : `? ${str}`; + } + + str = explicitKey ? `? 
${str}\n${indent}:` : `${str}:`; + + if (this.comment) { + // expected (but not strictly required) to be a single-line comment + str = addComment(str, ctx.indent, this.comment); + if (onComment) onComment(); + } + + let vcb = ''; + let valueComment = null; + + if (value instanceof Node) { + if (value.spaceBefore) vcb = '\n'; + + if (value.commentBefore) { + const cs = value.commentBefore.replace(/^/gm, `${ctx.indent}#`); + vcb += `\n${cs}`; + } + + valueComment = value.comment; + } else if (value && typeof value === 'object') { + value = doc.schema.createNode(value, true); + } + + ctx.implicitKey = false; + if (!explicitKey && !this.comment && value instanceof Scalar) ctx.indentAtStart = str.length + 1; + chompKeep = false; + + if (!indentSeq && indentSize >= 2 && !ctx.inFlow && !explicitKey && value instanceof YAMLSeq && value.type !== PlainValue.Type.FLOW_SEQ && !value.tag && !doc.anchors.getName(value)) { + // If indentSeq === false, consider '- ' as part of indentation where possible + ctx.indent = ctx.indent.substr(2); + } + + const valueStr = stringify(value, ctx, () => valueComment = null, () => chompKeep = true); + let ws = ' '; + + if (vcb || this.comment) { + ws = `${vcb}\n${ctx.indent}`; + } else if (!explicitKey && value instanceof Collection) { + const flow = valueStr[0] === '[' || valueStr[0] === '{'; + if (!flow || valueStr.includes('\n')) ws = `\n${ctx.indent}`; + } + + if (chompKeep && !valueComment && onChompKeep) onChompKeep(); + return addComment(str + ws + valueStr, ctx.indent, valueComment); + } + +} + +PlainValue._defineProperty(Pair, "Type", { + PAIR: 'PAIR', + MERGE_PAIR: 'MERGE_PAIR' +}); + +const getAliasCount = (node, anchors) => { + if (node instanceof Alias) { + const anchor = anchors.get(node.source); + return anchor.count * anchor.aliasCount; + } else if (node instanceof Collection) { + let count = 0; + + for (const item of node.items) { + const c = getAliasCount(item, anchors); + if (c > count) count = c; + } + + return count; + } else if (node instanceof Pair) { + const kc = getAliasCount(node.key, anchors); + const vc = getAliasCount(node.value, anchors); + return Math.max(kc, vc); + } + + return 1; +}; + +class Alias extends Node { + static stringify({ + range, + source + }, { + anchors, + doc, + implicitKey, + inStringifyKey + }) { + let anchor = Object.keys(anchors).find(a => anchors[a] === source); + if (!anchor && inStringifyKey) anchor = doc.anchors.getName(source) || doc.anchors.newName(); + if (anchor) return `*${anchor}${implicitKey ? ' ' : ''}`; + const msg = doc.anchors.getName(source) ? 
'Alias node must be after source node' : 'Source node not found for alias node'; + throw new Error(`${msg} [${range}]`); + } + + constructor(source) { + super(); + this.source = source; + this.type = PlainValue.Type.ALIAS; + } + + set tag(t) { + throw new Error('Alias nodes cannot have tags'); + } + + toJSON(arg, ctx) { + if (!ctx) return toJSON(this.source, arg, ctx); + const { + anchors, + maxAliasCount + } = ctx; + const anchor = anchors.get(this.source); + /* istanbul ignore if */ + + if (!anchor || anchor.res === undefined) { + const msg = 'This should not happen: Alias anchor was not resolved?'; + if (this.cstNode) throw new PlainValue.YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg); + } + + if (maxAliasCount >= 0) { + anchor.count += 1; + if (anchor.aliasCount === 0) anchor.aliasCount = getAliasCount(this.source, anchors); + + if (anchor.count * anchor.aliasCount > maxAliasCount) { + const msg = 'Excessive alias count indicates a resource exhaustion attack'; + if (this.cstNode) throw new PlainValue.YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg); + } + } + + return anchor.res; + } // Only called when stringifying an alias mapping key while constructing + // Object output. + + + toString(ctx) { + return Alias.stringify(this, ctx); + } + +} + +PlainValue._defineProperty(Alias, "default", true); + +function findPair(items, key) { + const k = key instanceof Scalar ? key.value : key; + + for (const it of items) { + if (it instanceof Pair) { + if (it.key === key || it.key === k) return it; + if (it.key && it.key.value === k) return it; + } + } + + return undefined; +} +class YAMLMap extends Collection { + add(pair, overwrite) { + if (!pair) pair = new Pair(pair);else if (!(pair instanceof Pair)) pair = new Pair(pair.key || pair, pair.value); + const prev = findPair(this.items, pair.key); + const sortEntries = this.schema && this.schema.sortMapEntries; + + if (prev) { + if (overwrite) prev.value = pair.value;else throw new Error(`Key ${pair.key} already set`); + } else if (sortEntries) { + const i = this.items.findIndex(item => sortEntries(pair, item) < 0); + if (i === -1) this.items.push(pair);else this.items.splice(i, 0, pair); + } else { + this.items.push(pair); + } + } + + delete(key) { + const it = findPair(this.items, key); + if (!it) return false; + const del = this.items.splice(this.items.indexOf(it), 1); + return del.length > 0; + } + + get(key, keepScalar) { + const it = findPair(this.items, key); + const node = it && it.value; + return !keepScalar && node instanceof Scalar ? node.value : node; + } + + has(key) { + return !!findPair(this.items, key); + } + + set(key, value) { + this.add(new Pair(key, value), true); + } + /** + * @param {*} arg ignored + * @param {*} ctx Conversion context, originally set in Document#toJSON() + * @param {Class} Type If set, forces the returned collection type + * @returns {*} Instance of Type, Map, or Object + */ + + + toJSON(_, ctx, Type) { + const map = Type ? new Type() : ctx && ctx.mapAsMap ? 
new Map() : {}; + if (ctx && ctx.onCreate) ctx.onCreate(map); + + for (const item of this.items) item.addToJSMap(ctx, map); + + return map; + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + + for (const item of this.items) { + if (!(item instanceof Pair)) throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); + } + + return super.toString(ctx, { + blockItem: n => n.str, + flowChars: { + start: '{', + end: '}' + }, + isMap: true, + itemIndent: ctx.indent || '' + }, onComment, onChompKeep); + } + +} + +const MERGE_KEY = '<<'; +class Merge extends Pair { + constructor(pair) { + if (pair instanceof Pair) { + let seq = pair.value; + + if (!(seq instanceof YAMLSeq)) { + seq = new YAMLSeq(); + seq.items.push(pair.value); + seq.range = pair.value.range; + } + + super(pair.key, seq); + this.range = pair.range; + } else { + super(new Scalar(MERGE_KEY), new YAMLSeq()); + } + + this.type = Pair.Type.MERGE_PAIR; + } // If the value associated with a merge key is a single mapping node, each of + // its key/value pairs is inserted into the current mapping, unless the key + // already exists in it. If the value associated with the merge key is a + // sequence, then this sequence is expected to contain mapping nodes and each + // of these nodes is merged in turn according to its order in the sequence. + // Keys in mapping nodes earlier in the sequence override keys specified in + // later mapping nodes. -- http://yaml.org/type/merge.html + + + addToJSMap(ctx, map) { + for (const { + source + } of this.value.items) { + if (!(source instanceof YAMLMap)) throw new Error('Merge sources must be maps'); + const srcMap = source.toJSON(null, ctx, Map); + + for (const [key, value] of srcMap) { + if (map instanceof Map) { + if (!map.has(key)) map.set(key, value); + } else if (map instanceof Set) { + map.add(key); + } else { + if (!Object.prototype.hasOwnProperty.call(map, key)) map[key] = value; + } + } + } + + return map; + } + + toString(ctx, onComment) { + const seq = this.value; + if (seq.items.length > 1) return super.toString(ctx, onComment); + this.value = seq.items[0]; + const str = super.toString(ctx, onComment); + this.value = seq; + return str; + } + +} + +const binaryOptions = { + defaultType: PlainValue.Type.BLOCK_LITERAL, + lineWidth: 76 +}; +const boolOptions = { + trueStr: 'true', + falseStr: 'false' +}; +const intOptions = { + asBigInt: false +}; +const nullOptions = { + nullStr: 'null' +}; +const strOptions = { + defaultType: PlainValue.Type.PLAIN, + doubleQuoted: { + jsonEncoding: false, + minMultiLineLength: 40 + }, + fold: { + lineWidth: 80, + minContentWidth: 20 + } +}; + +function resolveScalar(str, tags, scalarFallback) { + for (const { + format, + test, + resolve + } of tags) { + if (test) { + const match = str.match(test); + + if (match) { + let res = resolve.apply(null, match); + if (!(res instanceof Scalar)) res = new Scalar(res); + if (format) res.format = format; + return res; + } + } + } + + if (scalarFallback) str = scalarFallback(str); + return new Scalar(str); +} + +const FOLD_FLOW = 'flow'; +const FOLD_BLOCK = 'block'; +const FOLD_QUOTED = 'quoted'; // presumes i+1 is at the start of a line +// returns index of last newline in more-indented block + +const consumeMoreIndentedLines = (text, i) => { + let ch = text[i + 1]; + + while (ch === ' ' || ch === '\t') { + do { + ch = text[i += 1]; + } while (ch && ch !== '\n'); + + ch = text[i + 1]; + } + + return i; +}; +/** + * Tries to keep input at up to `lineWidth` 
characters, splitting only on spaces + * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are + * terminated with `\n` and started with `indent`. + * + * @param {string} text + * @param {string} indent + * @param {string} [mode='flow'] `'block'` prevents more-indented lines + * from being folded; `'quoted'` allows for `\` escapes, including escaped + * newlines + * @param {Object} options + * @param {number} [options.indentAtStart] Accounts for leading contents on + * the first line, defaulting to `indent.length` + * @param {number} [options.lineWidth=80] + * @param {number} [options.minContentWidth=20] Allow highly indented lines to + * stretch the line width + * @param {function} options.onFold Called once if the text is folded + * @param {function} options.onFold Called once if any line of text exceeds + * lineWidth characters + */ + + +function foldFlowLines(text, indent, mode, { + indentAtStart, + lineWidth = 80, + minContentWidth = 20, + onFold, + onOverflow +}) { + if (!lineWidth || lineWidth < 0) return text; + const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); + if (text.length <= endStep) return text; + const folds = []; + const escapedFolds = {}; + let end = lineWidth - (typeof indentAtStart === 'number' ? indentAtStart : indent.length); + let split = undefined; + let prev = undefined; + let overflow = false; + let i = -1; + + if (mode === FOLD_BLOCK) { + i = consumeMoreIndentedLines(text, i); + if (i !== -1) end = i + endStep; + } + + for (let ch; ch = text[i += 1];) { + if (mode === FOLD_QUOTED && ch === '\\') { + switch (text[i + 1]) { + case 'x': + i += 3; + break; + + case 'u': + i += 5; + break; + + case 'U': + i += 9; + break; + + default: + i += 1; + } + } + + if (ch === '\n') { + if (mode === FOLD_BLOCK) i = consumeMoreIndentedLines(text, i); + end = i + endStep; + split = undefined; + } else { + if (ch === ' ' && prev && prev !== ' ' && prev !== '\n' && prev !== '\t') { + // space surrounded by non-space can be replaced with newline + indent + const next = text[i + 1]; + if (next && next !== ' ' && next !== '\n' && next !== '\t') split = i; + } + + if (i >= end) { + if (split) { + folds.push(split); + end = split + endStep; + split = undefined; + } else if (mode === FOLD_QUOTED) { + // white-space collected at end may stretch past lineWidth + while (prev === ' ' || prev === '\t') { + prev = ch; + ch = text[i += 1]; + overflow = true; + } // i - 2 accounts for not-dropped last char + newline-escaping \ + + + folds.push(i - 2); + escapedFolds[i - 2] = true; + end = i - 2 + endStep; + split = undefined; + } else { + overflow = true; + } + } + } + + prev = ch; + } + + if (overflow && onOverflow) onOverflow(); + if (folds.length === 0) return text; + if (onFold) onFold(); + let res = text.slice(0, folds[0]); + + for (let i = 0; i < folds.length; ++i) { + const fold = folds[i]; + const end = folds[i + 1] || text.length; + if (mode === FOLD_QUOTED && escapedFolds[fold]) res += `${text[fold]}\\`; + res += `\n${indent}${text.slice(fold + 1, end)}`; + } + + return res; +} + +const getFoldOptions = ({ + indentAtStart +}) => indentAtStart ? Object.assign({ + indentAtStart +}, strOptions.fold) : strOptions.fold; // Also checks for lines starting with %, as parsing the output as YAML 1.1 will +// presume that's starting a new document. 
+ + +const containsDocumentMarker = str => /^(%|---|\.\.\.)/m.test(str); + +function lineLengthOverLimit(str, limit) { + const strLen = str.length; + if (strLen <= limit) return false; + + for (let i = 0, start = 0; i < strLen; ++i) { + if (str[i] === '\n') { + if (i - start > limit) return true; + start = i + 1; + if (strLen - start <= limit) return false; + } + } + + return true; +} + +function doubleQuotedString(value, ctx) { + const { + implicitKey + } = ctx; + const { + jsonEncoding, + minMultiLineLength + } = strOptions.doubleQuoted; + const json = JSON.stringify(value); + if (jsonEncoding) return json; + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + let str = ''; + let start = 0; + + for (let i = 0, ch = json[i]; ch; ch = json[++i]) { + if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { + // space before newline needs to be escaped to not be folded + str += json.slice(start, i) + '\\ '; + i += 1; + start = i; + ch = '\\'; + } + + if (ch === '\\') switch (json[i + 1]) { + case 'u': + { + str += json.slice(start, i); + const code = json.substr(i + 2, 4); + + switch (code) { + case '0000': + str += '\\0'; + break; + + case '0007': + str += '\\a'; + break; + + case '000b': + str += '\\v'; + break; + + case '001b': + str += '\\e'; + break; + + case '0085': + str += '\\N'; + break; + + case '00a0': + str += '\\_'; + break; + + case '2028': + str += '\\L'; + break; + + case '2029': + str += '\\P'; + break; + + default: + if (code.substr(0, 2) === '00') str += '\\x' + code.substr(2);else str += json.substr(i, 6); + } + + i += 5; + start = i + 1; + } + break; + + case 'n': + if (implicitKey || json[i + 2] === '"' || json.length < minMultiLineLength) { + i += 1; + } else { + // folding will eat first newline + str += json.slice(start, i) + '\n\n'; + + while (json[i + 2] === '\\' && json[i + 3] === 'n' && json[i + 4] !== '"') { + str += '\n'; + i += 2; + } + + str += indent; // space after newline needs to be escaped to not be folded + + if (json[i + 2] === ' ') str += '\\'; + i += 1; + start = i + 1; + } + + break; + + default: + i += 1; + } + } + + str = start ? str + json.slice(start) : json; + return implicitKey ? str : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx)); +} + +function singleQuotedString(value, ctx) { + if (ctx.implicitKey) { + if (/\n/.test(value)) return doubleQuotedString(value, ctx); + } else { + // single quoted string can't have leading or trailing whitespace around newline + if (/[ \t]\n|\n[ \t]/.test(value)) return doubleQuotedString(value, ctx); + } + + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; + return ctx.implicitKey ? res : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx)); +} + +function blockString({ + comment, + type, + value +}, ctx, onComment, onChompKeep) { + // 1. Block can't end in whitespace unless the last line is non-empty. + // 2. Strings consisting of only whitespace are best rendered explicitly. + if (/\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { + return doubleQuotedString(value, ctx); + } + + const indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); + const indentSize = indent ? '2' : '1'; // root is at -1 + + const literal = type === PlainValue.Type.BLOCK_FOLDED ? false : type === PlainValue.Type.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, strOptions.fold.lineWidth - indent.length); + let header = literal ? 
'|' : '>'; + if (!value) return header + '\n'; + let wsStart = ''; + let wsEnd = ''; + value = value.replace(/[\n\t ]*$/, ws => { + const n = ws.indexOf('\n'); + + if (n === -1) { + header += '-'; // strip + } else if (value === ws || n !== ws.length - 1) { + header += '+'; // keep + + if (onChompKeep) onChompKeep(); + } + + wsEnd = ws.replace(/\n$/, ''); + return ''; + }).replace(/^[\n ]*/, ws => { + if (ws.indexOf(' ') !== -1) header += indentSize; + const m = ws.match(/ +$/); + + if (m) { + wsStart = ws.slice(0, -m[0].length); + return m[0]; + } else { + wsStart = ws; + return ''; + } + }); + if (wsEnd) wsEnd = wsEnd.replace(/\n+(?!\n|$)/g, `$&${indent}`); + if (wsStart) wsStart = wsStart.replace(/\n+/g, `$&${indent}`); + + if (comment) { + header += ' #' + comment.replace(/ ?[\r\n]+/g, ' '); + if (onComment) onComment(); + } + + if (!value) return `${header}${indentSize}\n${indent}${wsEnd}`; + + if (literal) { + value = value.replace(/\n+/g, `$&${indent}`); + return `${header}\n${indent}${wsStart}${value}${wsEnd}`; + } + + value = value.replace(/\n+/g, '\n$&').replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded + // ^ ind.line ^ empty ^ capture next empty lines only at end of indent + .replace(/\n+/g, `$&${indent}`); + const body = foldFlowLines(`${wsStart}${value}${wsEnd}`, indent, FOLD_BLOCK, strOptions.fold); + return `${header}\n${indent}${body}`; +} + +function plainString(item, ctx, onComment, onChompKeep) { + const { + comment, + type, + value + } = item; + const { + actualString, + implicitKey, + indent, + inFlow + } = ctx; + + if (implicitKey && /[\n[\]{},]/.test(value) || inFlow && /[[\]{},]/.test(value)) { + return doubleQuotedString(value, ctx); + } + + if (!value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { + // not allowed: + // - empty string, '-' or '?' + // - start with an indicator character (except [?:-]) or /[?-] / + // - '\n ', ': ' or ' \n' anywhere + // - '#' not preceded by a non-space char + // - end with ' ' or ':' + return implicitKey || inFlow || value.indexOf('\n') === -1 ? value.indexOf('"') !== -1 && value.indexOf("'") === -1 ? singleQuotedString(value, ctx) : doubleQuotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep); + } + + if (!implicitKey && !inFlow && type !== PlainValue.Type.PLAIN && value.indexOf('\n') !== -1) { + // Where allowed & type not set explicitly, prefer block style for multiline strings + return blockString(item, ctx, onComment, onChompKeep); + } + + if (indent === '' && containsDocumentMarker(value)) { + ctx.forceBlockIndent = true; + return blockString(item, ctx, onComment, onChompKeep); + } + + const str = value.replace(/\n+/g, `$&\n${indent}`); // Verify that output will be parsed as a string, as e.g. plain numbers and + // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), + // and others in v1.1. + + if (actualString) { + const { + tags + } = ctx.doc.schema; + const resolved = resolveScalar(str, tags, tags.scalarFallback).value; + if (typeof resolved !== 'string') return doubleQuotedString(value, ctx); + } + + const body = implicitKey ? 
str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx)); + + if (comment && !inFlow && (body.indexOf('\n') !== -1 || comment.indexOf('\n') !== -1)) { + if (onComment) onComment(); + return addCommentBefore(body, indent, comment); + } + + return body; +} + +function stringifyString(item, ctx, onComment, onChompKeep) { + const { + defaultType + } = strOptions; + const { + implicitKey, + inFlow + } = ctx; + let { + type, + value + } = item; + + if (typeof value !== 'string') { + value = String(value); + item = Object.assign({}, item, { + value + }); + } + + const _stringify = _type => { + switch (_type) { + case PlainValue.Type.BLOCK_FOLDED: + case PlainValue.Type.BLOCK_LITERAL: + return blockString(item, ctx, onComment, onChompKeep); + + case PlainValue.Type.QUOTE_DOUBLE: + return doubleQuotedString(value, ctx); + + case PlainValue.Type.QUOTE_SINGLE: + return singleQuotedString(value, ctx); + + case PlainValue.Type.PLAIN: + return plainString(item, ctx, onComment, onChompKeep); + + default: + return null; + } + }; + + if (type !== PlainValue.Type.QUOTE_DOUBLE && /[\x00-\x08\x0b-\x1f\x7f-\x9f]/.test(value)) { + // force double quotes on control characters + type = PlainValue.Type.QUOTE_DOUBLE; + } else if ((implicitKey || inFlow) && (type === PlainValue.Type.BLOCK_FOLDED || type === PlainValue.Type.BLOCK_LITERAL)) { + // should not happen; blocks are not valid inside flow containers + type = PlainValue.Type.QUOTE_DOUBLE; + } + + let res = _stringify(type); + + if (res === null) { + res = _stringify(defaultType); + if (res === null) throw new Error(`Unsupported default string type ${defaultType}`); + } + + return res; +} + +function stringifyNumber({ + format, + minFractionDigits, + tag, + value +}) { + if (typeof value === 'bigint') return String(value); + if (!isFinite(value)) return isNaN(value) ? '.nan' : value < 0 ? 
'-.inf' : '.inf'; + let n = JSON.stringify(value); + + if (!format && minFractionDigits && (!tag || tag === 'tag:yaml.org,2002:float') && /^\d/.test(n)) { + let i = n.indexOf('.'); + + if (i < 0) { + i = n.length; + n += '.'; + } + + let d = minFractionDigits - (n.length - i - 1); + + while (d-- > 0) n += '0'; + } + + return n; +} + +function checkFlowCollectionEnd(errors, cst) { + let char, name; + + switch (cst.type) { + case PlainValue.Type.FLOW_MAP: + char = '}'; + name = 'flow map'; + break; + + case PlainValue.Type.FLOW_SEQ: + char = ']'; + name = 'flow sequence'; + break; + + default: + errors.push(new PlainValue.YAMLSemanticError(cst, 'Not a flow collection!?')); + return; + } + + let lastItem; + + for (let i = cst.items.length - 1; i >= 0; --i) { + const item = cst.items[i]; + + if (!item || item.type !== PlainValue.Type.COMMENT) { + lastItem = item; + break; + } + } + + if (lastItem && lastItem.char !== char) { + const msg = `Expected ${name} to end with ${char}`; + let err; + + if (typeof lastItem.offset === 'number') { + err = new PlainValue.YAMLSemanticError(cst, msg); + err.offset = lastItem.offset + 1; + } else { + err = new PlainValue.YAMLSemanticError(lastItem, msg); + if (lastItem.range && lastItem.range.end) err.offset = lastItem.range.end - lastItem.range.start; + } + + errors.push(err); + } +} +function checkFlowCommentSpace(errors, comment) { + const prev = comment.context.src[comment.range.start - 1]; + + if (prev !== '\n' && prev !== '\t' && prev !== ' ') { + const msg = 'Comments must be separated from other tokens by white space characters'; + errors.push(new PlainValue.YAMLSemanticError(comment, msg)); + } +} +function getLongKeyError(source, key) { + const sk = String(key); + const k = sk.substr(0, 8) + '...' + sk.substr(-8); + return new PlainValue.YAMLSemanticError(source, `The "${k}" key is too long`); +} +function resolveComments(collection, comments) { + for (const { + afterKey, + before, + comment + } of comments) { + let item = collection.items[before]; + + if (!item) { + if (comment !== undefined) { + if (collection.comment) collection.comment += '\n' + comment;else collection.comment = comment; + } + } else { + if (afterKey && item.value) item = item.value; + + if (comment === undefined) { + if (afterKey || !item.commentBefore) item.spaceBefore = true; + } else { + if (item.commentBefore) item.commentBefore += '\n' + comment;else item.commentBefore = comment; + } + } + } +} + +// on error, will return { str: string, errors: Error[] } +function resolveString(doc, node) { + const res = node.strValue; + if (!res) return ''; + if (typeof res === 'string') return res; + res.errors.forEach(error => { + if (!error.source) error.source = node; + doc.errors.push(error); + }); + return res.str; +} + +function resolveTagHandle(doc, node) { + const { + handle, + suffix + } = node.tag; + let prefix = doc.tagPrefixes.find(p => p.handle === handle); + + if (!prefix) { + const dtp = doc.getDefaults().tagPrefixes; + if (dtp) prefix = dtp.find(p => p.handle === handle); + if (!prefix) throw new PlainValue.YAMLSemanticError(node, `The ${handle} tag handle is non-default and was not declared.`); + } + + if (!suffix) throw new PlainValue.YAMLSemanticError(node, `The ${handle} tag has no suffix.`); + + if (handle === '!' 
&& (doc.version || doc.options.version) === '1.0') { + if (suffix[0] === '^') { + doc.warnings.push(new PlainValue.YAMLWarning(node, 'YAML 1.0 ^ tag expansion is not supported')); + return suffix; + } + + if (/[:/]/.test(suffix)) { + // word/foo -> tag:word.yaml.org,2002:foo + const vocab = suffix.match(/^([a-z0-9-]+)\/(.*)/i); + return vocab ? `tag:${vocab[1]}.yaml.org,2002:${vocab[2]}` : `tag:${suffix}`; + } + } + + return prefix.prefix + decodeURIComponent(suffix); +} + +function resolveTagName(doc, node) { + const { + tag, + type + } = node; + let nonSpecific = false; + + if (tag) { + const { + handle, + suffix, + verbatim + } = tag; + + if (verbatim) { + if (verbatim !== '!' && verbatim !== '!!') return verbatim; + const msg = `Verbatim tags aren't resolved, so ${verbatim} is invalid.`; + doc.errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } else if (handle === '!' && !suffix) { + nonSpecific = true; + } else { + try { + return resolveTagHandle(doc, node); + } catch (error) { + doc.errors.push(error); + } + } + } + + switch (type) { + case PlainValue.Type.BLOCK_FOLDED: + case PlainValue.Type.BLOCK_LITERAL: + case PlainValue.Type.QUOTE_DOUBLE: + case PlainValue.Type.QUOTE_SINGLE: + return PlainValue.defaultTags.STR; + + case PlainValue.Type.FLOW_MAP: + case PlainValue.Type.MAP: + return PlainValue.defaultTags.MAP; + + case PlainValue.Type.FLOW_SEQ: + case PlainValue.Type.SEQ: + return PlainValue.defaultTags.SEQ; + + case PlainValue.Type.PLAIN: + return nonSpecific ? PlainValue.defaultTags.STR : null; + + default: + return null; + } +} + +function resolveByTagName(doc, node, tagName) { + const { + tags + } = doc.schema; + const matchWithTest = []; + + for (const tag of tags) { + if (tag.tag === tagName) { + if (tag.test) matchWithTest.push(tag);else { + const res = tag.resolve(doc, node); + return res instanceof Collection ? 
res : new Scalar(res); + } + } + } + + const str = resolveString(doc, node); + if (typeof str === 'string' && matchWithTest.length > 0) return resolveScalar(str, matchWithTest, tags.scalarFallback); + return null; +} + +function getFallbackTagName({ + type +}) { + switch (type) { + case PlainValue.Type.FLOW_MAP: + case PlainValue.Type.MAP: + return PlainValue.defaultTags.MAP; + + case PlainValue.Type.FLOW_SEQ: + case PlainValue.Type.SEQ: + return PlainValue.defaultTags.SEQ; + + default: + return PlainValue.defaultTags.STR; + } +} + +function resolveTag(doc, node, tagName) { + try { + const res = resolveByTagName(doc, node, tagName); + + if (res) { + if (tagName && node.tag) res.tag = tagName; + return res; + } + } catch (error) { + /* istanbul ignore if */ + if (!error.source) error.source = node; + doc.errors.push(error); + return null; + } + + try { + const fallback = getFallbackTagName(node); + if (!fallback) throw new Error(`The tag ${tagName} is unavailable`); + const msg = `The tag ${tagName} is unavailable, falling back to ${fallback}`; + doc.warnings.push(new PlainValue.YAMLWarning(node, msg)); + const res = resolveByTagName(doc, node, fallback); + res.tag = tagName; + return res; + } catch (error) { + const refError = new PlainValue.YAMLReferenceError(node, error.message); + refError.stack = error.stack; + doc.errors.push(refError); + return null; + } +} + +const isCollectionItem = node => { + if (!node) return false; + const { + type + } = node; + return type === PlainValue.Type.MAP_KEY || type === PlainValue.Type.MAP_VALUE || type === PlainValue.Type.SEQ_ITEM; +}; + +function resolveNodeProps(errors, node) { + const comments = { + before: [], + after: [] + }; + let hasAnchor = false; + let hasTag = false; + const props = isCollectionItem(node.context.parent) ? node.context.parent.props.concat(node.props) : node.props; + + for (const { + start, + end + } of props) { + switch (node.context.src[start]) { + case PlainValue.Char.COMMENT: + { + if (!node.commentHasRequiredWhitespace(start)) { + const msg = 'Comments must be separated from other tokens by white space characters'; + errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + const { + header, + valueRange + } = node; + const cc = valueRange && (start > valueRange.start || header && start > header.start) ? 
comments.after : comments.before; + cc.push(node.context.src.slice(start + 1, end)); + break; + } + // Actual anchor & tag resolution is handled by schema, here we just complain + + case PlainValue.Char.ANCHOR: + if (hasAnchor) { + const msg = 'A node can have at most one anchor'; + errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + hasAnchor = true; + break; + + case PlainValue.Char.TAG: + if (hasTag) { + const msg = 'A node can have at most one tag'; + errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + hasTag = true; + break; + } + } + + return { + comments, + hasAnchor, + hasTag + }; +} + +function resolveNodeValue(doc, node) { + const { + anchors, + errors, + schema + } = doc; + + if (node.type === PlainValue.Type.ALIAS) { + const name = node.rawValue; + const src = anchors.getNode(name); + + if (!src) { + const msg = `Aliased anchor not found: ${name}`; + errors.push(new PlainValue.YAMLReferenceError(node, msg)); + return null; + } // Lazy resolution for circular references + + + const res = new Alias(src); + + anchors._cstAliases.push(res); + + return res; + } + + const tagName = resolveTagName(doc, node); + if (tagName) return resolveTag(doc, node, tagName); + + if (node.type !== PlainValue.Type.PLAIN) { + const msg = `Failed to resolve ${node.type} node here`; + errors.push(new PlainValue.YAMLSyntaxError(node, msg)); + return null; + } + + try { + const str = resolveString(doc, node); + return resolveScalar(str, schema.tags, schema.tags.scalarFallback); + } catch (error) { + if (!error.source) error.source = node; + errors.push(error); + return null; + } +} // sets node.resolved on success + + +function resolveNode(doc, node) { + if (!node) return null; + if (node.error) doc.errors.push(node.error); + const { + comments, + hasAnchor, + hasTag + } = resolveNodeProps(doc.errors, node); + + if (hasAnchor) { + const { + anchors + } = doc; + const name = node.anchor; + const prev = anchors.getNode(name); // At this point, aliases for any preceding node with the same anchor + // name have already been resolved, so it may safely be renamed. + + if (prev) anchors.map[anchors.newName(name)] = prev; // During parsing, we need to store the CST node in anchors.map as + // anchors need to be available during resolution to allow for + // circular references. + + anchors.map[name] = node; + } + + if (node.type === PlainValue.Type.ALIAS && (hasAnchor || hasTag)) { + const msg = 'An alias node must not specify any properties'; + doc.errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + const res = resolveNodeValue(doc, node); + + if (res) { + res.range = [node.range.start, node.range.end]; + if (doc.options.keepCstNodes) res.cstNode = node; + if (doc.options.keepNodeTypes) res.type = node.type; + const cb = comments.before.join('\n'); + + if (cb) { + res.commentBefore = res.commentBefore ? `${res.commentBefore}\n${cb}` : cb; + } + + const ca = comments.after.join('\n'); + if (ca) res.comment = res.comment ? `${res.comment}\n${ca}` : ca; + } + + return node.resolved = res; +} + +function resolveMap(doc, cst) { + if (cst.type !== PlainValue.Type.MAP && cst.type !== PlainValue.Type.FLOW_MAP) { + const msg = `A ${cst.type} node cannot be resolved as a mapping`; + doc.errors.push(new PlainValue.YAMLSyntaxError(cst, msg)); + return null; + } + + const { + comments, + items + } = cst.type === PlainValue.Type.FLOW_MAP ? 
resolveFlowMapItems(doc, cst) : resolveBlockMapItems(doc, cst); + const map = new YAMLMap(); + map.items = items; + resolveComments(map, comments); + let hasCollectionKey = false; + + for (let i = 0; i < items.length; ++i) { + const { + key: iKey + } = items[i]; + if (iKey instanceof Collection) hasCollectionKey = true; + + if (doc.schema.merge && iKey && iKey.value === MERGE_KEY) { + items[i] = new Merge(items[i]); + const sources = items[i].value.items; + let error = null; + sources.some(node => { + if (node instanceof Alias) { + // During parsing, alias sources are CST nodes; to account for + // circular references their resolved values can't be used here. + const { + type + } = node.source; + if (type === PlainValue.Type.MAP || type === PlainValue.Type.FLOW_MAP) return false; + return error = 'Merge nodes aliases can only point to maps'; + } + + return error = 'Merge nodes can only have Alias nodes as values'; + }); + if (error) doc.errors.push(new PlainValue.YAMLSemanticError(cst, error)); + } else { + for (let j = i + 1; j < items.length; ++j) { + const { + key: jKey + } = items[j]; + + if (iKey === jKey || iKey && jKey && Object.prototype.hasOwnProperty.call(iKey, 'value') && iKey.value === jKey.value) { + const msg = `Map keys must be unique; "${iKey}" is repeated`; + doc.errors.push(new PlainValue.YAMLSemanticError(cst, msg)); + break; + } + } + } + } + + if (hasCollectionKey && !doc.options.mapAsMap) { + const warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. Use mapAsMap: true to avoid this.'; + doc.warnings.push(new PlainValue.YAMLWarning(cst, warn)); + } + + cst.resolved = map; + return map; +} + +const valueHasPairComment = ({ + context: { + lineStart, + node, + src + }, + props +}) => { + if (props.length === 0) return false; + const { + start + } = props[0]; + if (node && start > node.valueRange.start) return false; + if (src[start] !== PlainValue.Char.COMMENT) return false; + + for (let i = lineStart; i < start; ++i) if (src[i] === '\n') return false; + + return true; +}; + +function resolvePairComment(item, pair) { + if (!valueHasPairComment(item)) return; + const comment = item.getPropValue(0, PlainValue.Char.COMMENT, true); + let found = false; + const cb = pair.value.commentBefore; + + if (cb && cb.startsWith(comment)) { + pair.value.commentBefore = cb.substr(comment.length + 1); + found = true; + } else { + const cc = pair.value.comment; + + if (!item.node && cc && cc.startsWith(comment)) { + pair.value.comment = cc.substr(comment.length + 1); + found = true; + } + } + + if (found) pair.comment = comment; +} + +function resolveBlockMapItems(doc, cst) { + const comments = []; + const items = []; + let key = undefined; + let keyStart = null; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + switch (item.type) { + case PlainValue.Type.BLANK_LINE: + comments.push({ + afterKey: !!key, + before: items.length + }); + break; + + case PlainValue.Type.COMMENT: + comments.push({ + afterKey: !!key, + before: items.length, + comment: item.comment + }); + break; + + case PlainValue.Type.MAP_KEY: + if (key !== undefined) items.push(new Pair(key)); + if (item.error) doc.errors.push(item.error); + key = resolveNode(doc, item.node); + keyStart = null; + break; + + case PlainValue.Type.MAP_VALUE: + { + if (key === undefined) key = null; + if (item.error) doc.errors.push(item.error); + + if (!item.context.atLineStart && item.node && item.node.type === PlainValue.Type.MAP && !item.node.context.atLineStart) { + 
const msg = 'Nested mappings are not allowed in compact mappings'; + doc.errors.push(new PlainValue.YAMLSemanticError(item.node, msg)); + } + + let valueNode = item.node; + + if (!valueNode && item.props.length > 0) { + // Comments on an empty mapping value need to be preserved, so we + // need to construct a minimal empty node here to use instead of the + // missing `item.node`. -- eemeli/yaml#19 + valueNode = new PlainValue.PlainValue(PlainValue.Type.PLAIN, []); + valueNode.context = { + parent: item, + src: item.context.src + }; + const pos = item.range.start + 1; + valueNode.range = { + start: pos, + end: pos + }; + valueNode.valueRange = { + start: pos, + end: pos + }; + + if (typeof item.range.origStart === 'number') { + const origPos = item.range.origStart + 1; + valueNode.range.origStart = valueNode.range.origEnd = origPos; + valueNode.valueRange.origStart = valueNode.valueRange.origEnd = origPos; + } + } + + const pair = new Pair(key, resolveNode(doc, valueNode)); + resolvePairComment(item, pair); + items.push(pair); + + if (key && typeof keyStart === 'number') { + if (item.range.start > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key)); + } + + key = undefined; + keyStart = null; + } + break; + + default: + if (key !== undefined) items.push(new Pair(key)); + key = resolveNode(doc, item); + keyStart = item.range.start; + if (item.error) doc.errors.push(item.error); + + next: for (let j = i + 1;; ++j) { + const nextItem = cst.items[j]; + + switch (nextItem && nextItem.type) { + case PlainValue.Type.BLANK_LINE: + case PlainValue.Type.COMMENT: + continue next; + + case PlainValue.Type.MAP_VALUE: + break next; + + default: + { + const msg = 'Implicit map keys need to be followed by map values'; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + break next; + } + } + } + + if (item.valueRangeContainsNewline) { + const msg = 'Implicit map keys need to be on a single line'; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + } + + } + } + + if (key !== undefined) items.push(new Pair(key)); + return { + comments, + items + }; +} + +function resolveFlowMapItems(doc, cst) { + const comments = []; + const items = []; + let key = undefined; + let explicitKey = false; + let next = '{'; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + if (typeof item.char === 'string') { + const { + char, + offset + } = item; + + if (char === '?' 
&& key === undefined && !explicitKey) { + explicitKey = true; + next = ':'; + continue; + } + + if (char === ':') { + if (key === undefined) key = null; + + if (next === ':') { + next = ','; + continue; + } + } else { + if (explicitKey) { + if (key === undefined && char !== ',') key = null; + explicitKey = false; + } + + if (key !== undefined) { + items.push(new Pair(key)); + key = undefined; + + if (char === ',') { + next = ':'; + continue; + } + } + } + + if (char === '}') { + if (i === cst.items.length - 1) continue; + } else if (char === next) { + next = ':'; + continue; + } + + const msg = `Flow map contains an unexpected ${char}`; + const err = new PlainValue.YAMLSyntaxError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } else if (item.type === PlainValue.Type.BLANK_LINE) { + comments.push({ + afterKey: !!key, + before: items.length + }); + } else if (item.type === PlainValue.Type.COMMENT) { + checkFlowCommentSpace(doc.errors, item); + comments.push({ + afterKey: !!key, + before: items.length, + comment: item.comment + }); + } else if (key === undefined) { + if (next === ',') doc.errors.push(new PlainValue.YAMLSemanticError(item, 'Separator , missing in flow map')); + key = resolveNode(doc, item); + } else { + if (next !== ',') doc.errors.push(new PlainValue.YAMLSemanticError(item, 'Indicator : missing in flow map entry')); + items.push(new Pair(key, resolveNode(doc, item))); + key = undefined; + explicitKey = false; + } + } + + checkFlowCollectionEnd(doc.errors, cst); + if (key !== undefined) items.push(new Pair(key)); + return { + comments, + items + }; +} + +function resolveSeq(doc, cst) { + if (cst.type !== PlainValue.Type.SEQ && cst.type !== PlainValue.Type.FLOW_SEQ) { + const msg = `A ${cst.type} node cannot be resolved as a sequence`; + doc.errors.push(new PlainValue.YAMLSyntaxError(cst, msg)); + return null; + } + + const { + comments, + items + } = cst.type === PlainValue.Type.FLOW_SEQ ? resolveFlowSeqItems(doc, cst) : resolveBlockSeqItems(doc, cst); + const seq = new YAMLSeq(); + seq.items = items; + resolveComments(seq, comments); + + if (!doc.options.mapAsMap && items.some(it => it instanceof Pair && it.key instanceof Collection)) { + const warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. 
Use mapAsMap: true to avoid this.'; + doc.warnings.push(new PlainValue.YAMLWarning(cst, warn)); + } + + cst.resolved = seq; + return seq; +} + +function resolveBlockSeqItems(doc, cst) { + const comments = []; + const items = []; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + switch (item.type) { + case PlainValue.Type.BLANK_LINE: + comments.push({ + before: items.length + }); + break; + + case PlainValue.Type.COMMENT: + comments.push({ + comment: item.comment, + before: items.length + }); + break; + + case PlainValue.Type.SEQ_ITEM: + if (item.error) doc.errors.push(item.error); + items.push(resolveNode(doc, item.node)); + + if (item.hasProps) { + const msg = 'Sequence items cannot have tags or anchors before the - indicator'; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + } + + break; + + default: + if (item.error) doc.errors.push(item.error); + doc.errors.push(new PlainValue.YAMLSyntaxError(item, `Unexpected ${item.type} node in sequence`)); + } + } + + return { + comments, + items + }; +} + +function resolveFlowSeqItems(doc, cst) { + const comments = []; + const items = []; + let explicitKey = false; + let key = undefined; + let keyStart = null; + let next = '['; + let prevItem = null; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + if (typeof item.char === 'string') { + const { + char, + offset + } = item; + + if (char !== ':' && (explicitKey || key !== undefined)) { + if (explicitKey && key === undefined) key = next ? items.pop() : null; + items.push(new Pair(key)); + explicitKey = false; + key = undefined; + keyStart = null; + } + + if (char === next) { + next = null; + } else if (!next && char === '?') { + explicitKey = true; + } else if (next !== '[' && char === ':' && key === undefined) { + if (next === ',') { + key = items.pop(); + + if (key instanceof Pair) { + const msg = 'Chaining flow sequence pairs is invalid'; + const err = new PlainValue.YAMLSemanticError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } + + if (!explicitKey && typeof keyStart === 'number') { + const keyEnd = item.range ? 
item.range.start : item.offset; + if (keyEnd > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key)); + const { + src + } = prevItem.context; + + for (let i = keyStart; i < keyEnd; ++i) if (src[i] === '\n') { + const msg = 'Implicit keys of flow sequence pairs need to be on a single line'; + doc.errors.push(new PlainValue.YAMLSemanticError(prevItem, msg)); + break; + } + } + } else { + key = null; + } + + keyStart = null; + explicitKey = false; + next = null; + } else if (next === '[' || char !== ']' || i < cst.items.length - 1) { + const msg = `Flow sequence contains an unexpected ${char}`; + const err = new PlainValue.YAMLSyntaxError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } + } else if (item.type === PlainValue.Type.BLANK_LINE) { + comments.push({ + before: items.length + }); + } else if (item.type === PlainValue.Type.COMMENT) { + checkFlowCommentSpace(doc.errors, item); + comments.push({ + comment: item.comment, + before: items.length + }); + } else { + if (next) { + const msg = `Expected a ${next} in flow sequence`; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + } + + const value = resolveNode(doc, item); + + if (key === undefined) { + items.push(value); + prevItem = item; + } else { + items.push(new Pair(key, value)); + key = undefined; + } + + keyStart = item.range.start; + next = ','; + } + } + + checkFlowCollectionEnd(doc.errors, cst); + if (key !== undefined) items.push(new Pair(key)); + return { + comments, + items + }; +} + +exports.Alias = Alias; +exports.Collection = Collection; +exports.Merge = Merge; +exports.Node = Node; +exports.Pair = Pair; +exports.Scalar = Scalar; +exports.YAMLMap = YAMLMap; +exports.YAMLSeq = YAMLSeq; +exports.addComment = addComment; +exports.binaryOptions = binaryOptions; +exports.boolOptions = boolOptions; +exports.findPair = findPair; +exports.intOptions = intOptions; +exports.isEmptyPath = isEmptyPath; +exports.nullOptions = nullOptions; +exports.resolveMap = resolveMap; +exports.resolveNode = resolveNode; +exports.resolveSeq = resolveSeq; +exports.resolveString = resolveString; +exports.strOptions = strOptions; +exports.stringifyNumber = stringifyNumber; +exports.stringifyString = stringifyString; +exports.toJSON = toJSON; + + +/***/ }), + +/***/ 383: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var PlainValue = __nccwpck_require__(215); +var resolveSeq = __nccwpck_require__(140); + +/* global atob, btoa, Buffer */ +const binary = { + identify: value => value instanceof Uint8Array, + // Buffer inherits from Uint8Array + default: false, + tag: 'tag:yaml.org,2002:binary', + + /** + * Returns a Buffer in node and an Uint8Array in browsers + * + * To use the resulting buffer as an image, you'll want to do something like: + * + * const blob = new Blob([buffer], { type: 'image/jpeg' }) + * document.querySelector('#photo').src = URL.createObjectURL(blob) + */ + resolve: (doc, node) => { + const src = resolveSeq.resolveString(doc, node); + + if (typeof Buffer === 'function') { + return Buffer.from(src, 'base64'); + } else if (typeof atob === 'function') { + // On IE 11, atob() can't handle newlines + const str = atob(src.replace(/[\n\r]/g, '')); + const buffer = new Uint8Array(str.length); + + for (let i = 0; i < str.length; ++i) buffer[i] = str.charCodeAt(i); + + return buffer; + } else { + const msg = 'This environment does not support reading binary tags; either Buffer or atob is required'; + doc.errors.push(new PlainValue.YAMLReferenceError(node, msg)); + 
return null; + } + }, + options: resolveSeq.binaryOptions, + stringify: ({ + comment, + type, + value + }, ctx, onComment, onChompKeep) => { + let src; + + if (typeof Buffer === 'function') { + src = value instanceof Buffer ? value.toString('base64') : Buffer.from(value.buffer).toString('base64'); + } else if (typeof btoa === 'function') { + let s = ''; + + for (let i = 0; i < value.length; ++i) s += String.fromCharCode(value[i]); + + src = btoa(s); + } else { + throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); + } + + if (!type) type = resolveSeq.binaryOptions.defaultType; + + if (type === PlainValue.Type.QUOTE_DOUBLE) { + value = src; + } else { + const { + lineWidth + } = resolveSeq.binaryOptions; + const n = Math.ceil(src.length / lineWidth); + const lines = new Array(n); + + for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { + lines[i] = src.substr(o, lineWidth); + } + + value = lines.join(type === PlainValue.Type.BLOCK_LITERAL ? '\n' : ' '); + } + + return resolveSeq.stringifyString({ + comment, + type, + value + }, ctx, onComment, onChompKeep); + } +}; + +function parsePairs(doc, cst) { + const seq = resolveSeq.resolveSeq(doc, cst); + + for (let i = 0; i < seq.items.length; ++i) { + let item = seq.items[i]; + if (item instanceof resolveSeq.Pair) continue;else if (item instanceof resolveSeq.YAMLMap) { + if (item.items.length > 1) { + const msg = 'Each pair must have its own sequence indicator'; + throw new PlainValue.YAMLSemanticError(cst, msg); + } + + const pair = item.items[0] || new resolveSeq.Pair(); + if (item.commentBefore) pair.commentBefore = pair.commentBefore ? `${item.commentBefore}\n${pair.commentBefore}` : item.commentBefore; + if (item.comment) pair.comment = pair.comment ? `${item.comment}\n${pair.comment}` : item.comment; + item = pair; + } + seq.items[i] = item instanceof resolveSeq.Pair ? 
item : new resolveSeq.Pair(item); + } + + return seq; +} +function createPairs(schema, iterable, ctx) { + const pairs = new resolveSeq.YAMLSeq(schema); + pairs.tag = 'tag:yaml.org,2002:pairs'; + + for (const it of iterable) { + let key, value; + + if (Array.isArray(it)) { + if (it.length === 2) { + key = it[0]; + value = it[1]; + } else throw new TypeError(`Expected [key, value] tuple: ${it}`); + } else if (it && it instanceof Object) { + const keys = Object.keys(it); + + if (keys.length === 1) { + key = keys[0]; + value = it[key]; + } else throw new TypeError(`Expected { key: value } tuple: ${it}`); + } else { + key = it; + } + + const pair = schema.createPair(key, value, ctx); + pairs.items.push(pair); + } + + return pairs; +} +const pairs = { + default: false, + tag: 'tag:yaml.org,2002:pairs', + resolve: parsePairs, + createNode: createPairs +}; + +class YAMLOMap extends resolveSeq.YAMLSeq { + constructor() { + super(); + + PlainValue._defineProperty(this, "add", resolveSeq.YAMLMap.prototype.add.bind(this)); + + PlainValue._defineProperty(this, "delete", resolveSeq.YAMLMap.prototype.delete.bind(this)); + + PlainValue._defineProperty(this, "get", resolveSeq.YAMLMap.prototype.get.bind(this)); + + PlainValue._defineProperty(this, "has", resolveSeq.YAMLMap.prototype.has.bind(this)); + + PlainValue._defineProperty(this, "set", resolveSeq.YAMLMap.prototype.set.bind(this)); + + this.tag = YAMLOMap.tag; + } + + toJSON(_, ctx) { + const map = new Map(); + if (ctx && ctx.onCreate) ctx.onCreate(map); + + for (const pair of this.items) { + let key, value; + + if (pair instanceof resolveSeq.Pair) { + key = resolveSeq.toJSON(pair.key, '', ctx); + value = resolveSeq.toJSON(pair.value, key, ctx); + } else { + key = resolveSeq.toJSON(pair, '', ctx); + } + + if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys'); + map.set(key, value); + } + + return map; + } + +} + +PlainValue._defineProperty(YAMLOMap, "tag", 'tag:yaml.org,2002:omap'); + +function parseOMap(doc, cst) { + const pairs = parsePairs(doc, cst); + const seenKeys = []; + + for (const { + key + } of pairs.items) { + if (key instanceof resolveSeq.Scalar) { + if (seenKeys.includes(key.value)) { + const msg = 'Ordered maps must not include duplicate keys'; + throw new PlainValue.YAMLSemanticError(cst, msg); + } else { + seenKeys.push(key.value); + } + } + } + + return Object.assign(new YAMLOMap(), pairs); +} + +function createOMap(schema, iterable, ctx) { + const pairs = createPairs(schema, iterable, ctx); + const omap = new YAMLOMap(); + omap.items = pairs.items; + return omap; +} + +const omap = { + identify: value => value instanceof Map, + nodeClass: YAMLOMap, + default: false, + tag: 'tag:yaml.org,2002:omap', + resolve: parseOMap, + createNode: createOMap +}; + +class YAMLSet extends resolveSeq.YAMLMap { + constructor() { + super(); + this.tag = YAMLSet.tag; + } + + add(key) { + const pair = key instanceof resolveSeq.Pair ? key : new resolveSeq.Pair(key); + const prev = resolveSeq.findPair(this.items, pair.key); + if (!prev) this.items.push(pair); + } + + get(key, keepPair) { + const pair = resolveSeq.findPair(this.items, key); + return !keepPair && pair instanceof resolveSeq.Pair ? pair.key instanceof resolveSeq.Scalar ? 
pair.key.value : pair.key : pair; + } + + set(key, value) { + if (typeof value !== 'boolean') throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); + const prev = resolveSeq.findPair(this.items, key); + + if (prev && !value) { + this.items.splice(this.items.indexOf(prev), 1); + } else if (!prev && value) { + this.items.push(new resolveSeq.Pair(key)); + } + } + + toJSON(_, ctx) { + return super.toJSON(_, ctx, Set); + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + if (this.hasAllNullValues()) return super.toString(ctx, onComment, onChompKeep);else throw new Error('Set items must all have null values'); + } + +} + +PlainValue._defineProperty(YAMLSet, "tag", 'tag:yaml.org,2002:set'); + +function parseSet(doc, cst) { + const map = resolveSeq.resolveMap(doc, cst); + if (!map.hasAllNullValues()) throw new PlainValue.YAMLSemanticError(cst, 'Set items must all have null values'); + return Object.assign(new YAMLSet(), map); +} + +function createSet(schema, iterable, ctx) { + const set = new YAMLSet(); + + for (const value of iterable) set.items.push(schema.createPair(value, null, ctx)); + + return set; +} + +const set = { + identify: value => value instanceof Set, + nodeClass: YAMLSet, + default: false, + tag: 'tag:yaml.org,2002:set', + resolve: parseSet, + createNode: createSet +}; + +const parseSexagesimal = (sign, parts) => { + const n = parts.split(':').reduce((n, p) => n * 60 + Number(p), 0); + return sign === '-' ? -n : n; +}; // hhhh:mm:ss.sss + + +const stringifySexagesimal = ({ + value +}) => { + if (isNaN(value) || !isFinite(value)) return resolveSeq.stringifyNumber(value); + let sign = ''; + + if (value < 0) { + sign = '-'; + value = Math.abs(value); + } + + const parts = [value % 60]; // seconds, including ms + + if (value < 60) { + parts.unshift(0); // at least one : is required + } else { + value = Math.round((value - parts[0]) / 60); + parts.unshift(value % 60); // minutes + + if (value >= 60) { + value = Math.round((value - parts[0]) / 60); + parts.unshift(value); // hours + } + } + + return sign + parts.map(n => n < 10 ? '0' + String(n) : String(n)).join(':').replace(/000000\d*$/, '') // % 60 may introduce error + ; +}; + +const intTime = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'TIME', + test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+)$/, + resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')), + stringify: stringifySexagesimal +}; +const floatTime = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'TIME', + test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*)$/, + resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')), + stringify: stringifySexagesimal +}; +const timestamp = { + identify: value => value instanceof Date, + default: true, + tag: 'tag:yaml.org,2002:timestamp', + // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part + // may be omitted altogether, resulting in a date format. In such a case, the time part is + // assumed to be 00:00:00Z (start of day, UTC). + test: RegExp('^(?:' + '([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd + '(?:(?:t|T|[ \\t]+)' + // t | T | whitespace + '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? + '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 + ')?' 
+ ')$'), + resolve: (str, year, month, day, hour, minute, second, millisec, tz) => { + if (millisec) millisec = (millisec + '00').substr(1, 3); + let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec || 0); + + if (tz && tz !== 'Z') { + let d = parseSexagesimal(tz[0], tz.slice(1)); + if (Math.abs(d) < 30) d *= 60; + date -= 60000 * d; + } + + return new Date(date); + }, + stringify: ({ + value + }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') +}; + +/* global console, process, YAML_SILENCE_DEPRECATION_WARNINGS, YAML_SILENCE_WARNINGS */ +function shouldWarn(deprecation) { + const env = typeof process !== 'undefined' && process.env || {}; + + if (deprecation) { + if (typeof YAML_SILENCE_DEPRECATION_WARNINGS !== 'undefined') return !YAML_SILENCE_DEPRECATION_WARNINGS; + return !env.YAML_SILENCE_DEPRECATION_WARNINGS; + } + + if (typeof YAML_SILENCE_WARNINGS !== 'undefined') return !YAML_SILENCE_WARNINGS; + return !env.YAML_SILENCE_WARNINGS; +} + +function warn(warning, type) { + if (shouldWarn(false)) { + const emit = typeof process !== 'undefined' && process.emitWarning; // This will throw in Jest if `warning` is an Error instance due to + // https://github.com/facebook/jest/issues/2549 + + if (emit) emit(warning, type);else { + // eslint-disable-next-line no-console + console.warn(type ? `${type}: ${warning}` : warning); + } + } +} +function warnFileDeprecation(filename) { + if (shouldWarn(true)) { + const path = filename.replace(/.*yaml[/\\]/i, '').replace(/\.js$/, '').replace(/\\/g, '/'); + warn(`The endpoint 'yaml/${path}' will be removed in a future release.`, 'DeprecationWarning'); + } +} +const warned = {}; +function warnOptionDeprecation(name, alternative) { + if (!warned[name] && shouldWarn(true)) { + warned[name] = true; + let msg = `The option '${name}' will be removed in a future release`; + msg += alternative ? 
`, use '${alternative}' instead.` : '.'; + warn(msg, 'DeprecationWarning'); + } +} + +exports.binary = binary; +exports.floatTime = floatTime; +exports.intTime = intTime; +exports.omap = omap; +exports.pairs = pairs; +exports.set = set; +exports.timestamp = timestamp; +exports.warn = warn; +exports.warnFileDeprecation = warnFileDeprecation; +exports.warnOptionDeprecation = warnOptionDeprecation; + + +/***/ }), + +/***/ 552: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(65).YAML + + +/***/ }), + +/***/ 357: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert");; + +/***/ }), + +/***/ 129: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process");; + +/***/ }), + +/***/ 417: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto");; + +/***/ }), + +/***/ 614: +/***/ ((module) => { + +"use strict"; +module.exports = require("events");; + +/***/ }), + +/***/ 747: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs");; + +/***/ }), + +/***/ 605: +/***/ ((module) => { + +"use strict"; +module.exports = require("http");; + +/***/ }), + +/***/ 211: +/***/ ((module) => { + +"use strict"; +module.exports = require("https");; + +/***/ }), + +/***/ 631: +/***/ ((module) => { + +"use strict"; +module.exports = require("net");; + +/***/ }), + +/***/ 87: +/***/ ((module) => { + +"use strict"; +module.exports = require("os");; + +/***/ }), + +/***/ 622: +/***/ ((module) => { + +"use strict"; +module.exports = require("path");; + +/***/ }), + +/***/ 413: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream");; + +/***/ }), + +/***/ 16: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls");; + +/***/ }), + +/***/ 669: +/***/ ((module) => { + +"use strict"; +module.exports = require("util");; + +/***/ }) + +/******/ }); +/************************************************************************/ +/******/ // The module cache +/******/ var __webpack_module_cache__ = {}; +/******/ +/******/ // The require function +/******/ function __nccwpck_require__(moduleId) { +/******/ // Check if module is in cache +/******/ if(__webpack_module_cache__[moduleId]) { +/******/ return __webpack_module_cache__[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = __webpack_module_cache__[moduleId] = { +/******/ // no module.id needed +/******/ // no module.loaded needed +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ var threw = true; +/******/ try { +/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); +/******/ threw = false; +/******/ } finally { +/******/ if(threw) delete __webpack_module_cache__[moduleId]; +/******/ } +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/************************************************************************/ +/******/ /* webpack/runtime/compat */ +/******/ +/******/ __nccwpck_require__.ab = __dirname + "/";/************************************************************************/ +/******/ // module exports must be returned from runtime so entry inlining is disabled +/******/ // startup +/******/ // Load entry module and return exports +/******/ return __nccwpck_require__(109); +/******/ })() +; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/index.js.map 
b/dist/index.js.map new file mode 100644 index 0000000..bc8aa40 --- /dev/null +++ b/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../webpack://debugger-action/./lib/frpc.js","../webpack://debugger-action/./lib/main.js","../webpack://debugger-action/./lib/ngrok.js","../webpack://debugger-action/./lib/sshd.js","../webpack://debugger-action/./lib/utils.js","../webpack://debugger-action/./node_modules/@actions/core/lib/command.js","../webpack://debugger-action/./node_modules/@actions/core/lib/core.js","../webpack://debugger-action/./node_modules/@actions/core/lib/file-command.js","../webpack://debugger-action/./node_modules/@actions/core/lib/utils.js","../webpack://debugger-action/./node_modules/@actions/exec/lib/exec.js","../webpack://debugger-action/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://debugger-action/./node_modules/@actions/http-client/index.js","../webpack://debugger-action/./node_modules/@actions/http-client/proxy.js","../webpack://debugger-action/./node_modules/@actions/io/lib/io-util.js","../webpack://debugger-action/./node_modules/@actions/io/lib/io.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/lib/manifest.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/lib/retry-helper.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/lib/tool-cache.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/node_modules/semver/semver.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/node_modules/uuid/lib/bytesToUuid.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/node_modules/uuid/lib/rng.js","../webpack://debugger-action/./node_modules/@actions/tool-cache/node_modules/uuid/v4.js","../webpack://debugger-action/./node_modules/ini/ini.js","../webpack://debugger-action/./node_modules/tunnel/index.js","../webpack://debugger-action/./node_modules/tunnel/lib/tunnel.js","../webpack://debugger-action/./node_modules/yaml/dist/Document-2cf6b08c.js","../webpack://debugger-action/./node_modules/yaml/dist/PlainValue-ec8e588e.js","../webpack://debugger-action/./node_modules/yaml/dist/Schema-42e9705c.js","../webpack://debugger-action/./node_modules/yaml/dist/index.js","../webpack://debugger-action/./node_modules/yaml/dist/parse-cst.js","../webpack://debugger-action/./node_modules/yaml/dist/resolveSeq-4a68b39b.js","../webpack://debugger-action/./node_modules/yaml/dist/warnings-39684f17.js","../webpack://debugger-action/./node_modules/yaml/index.js","../webpack://debugger-action/external \"assert\"","../webpack://debugger-action/external \"child_process\"","../webpack://debugger-action/external \"crypto\"","../webpack://debugger-action/external \"events\"","../webpack://debugger-action/external \"fs\"","../webpack://debugger-action/external \"http\"","../webpack://debugger-action/external \"https\"","../webpack://debugger-action/external \"net\"","../webpack://debugger-action/external \"os\"","../webpack://debugger-action/external \"path\"","../webpack://debugger-action/external \"stream\"","../webpack://debugger-action/external \"tls\"","../webpack://debugger-action/external \"util\"","../webpack://debugger-action/webpack/bootstrap","../webpack://debugger-action/webpack/runtime/compat","../webpack://debugger-action/webpack/startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.frp = void 0;\nconst util = __importStar(require(\"util\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst fs_1 = require(\"fs\");\nconst toolCache = __importStar(require(\"@actions/tool-cache\"));\nconst util_1 = require(\"util\");\nconst ini_1 = __importDefault(require(\"ini\"));\nconst utils_1 = require(\"./utils\");\nconst writeFileAsync = util_1.promisify(fs_1.writeFile);\nconst name = 'frpc';\nconst defaultVersion = '0.34.3';\nconst fileSufix = utils_1.getArchivedExtension();\nconst downloadUrlScheme = 'https://github.com/fatedier/frp/releases/download/v%s/%s%s';\nfunction getFullName(version) {\n return util.format('frp_%s_%s_amd64', version, utils_1.getOsType());\n}\nfunction getExecPath(version) {\n return __awaiter(this, void 0, void 0, function* () {\n const downloadUrl = util.format(downloadUrlScheme, version, getFullName(version), fileSufix);\n const localPath = yield utils_1.downloadCache(downloadUrl, name, version, getFullName(version) + fileSufix);\n const execPath = yield toolCache.extractTar(localPath);\n return execPath;\n });\n}\nfunction writeInit(path, frp_server_addr, frp_server_port, frp_token, ssh_port) {\n return __awaiter(this, void 0, void 0, function* () {\n const config = Object();\n config['common'] = {\n server_addr: frp_server_addr,\n server_port: frp_server_port,\n token: frp_token\n };\n const ssh_port_special = util.format('ssh-%s', ssh_port);\n config[ssh_port_special] = {\n type: 'tcp',\n local_ip: '127.0.0.1',\n local_port: 8000,\n remote_port: ssh_port\n };\n yield writeFileAsync(path, ini_1.default.stringify(config));\n return path;\n });\n}\nfunction frp(frp_server_addr, frp_server_port, frp_token, ssh_port) {\n return __awaiter(this, void 0, void 0, function* () {\n const version = defaultVersion;\n const execPath = yield getExecPath(version);\n const initFile = util.format('%s/%s/my.ini', execPath, 
getFullName(version));\n yield writeInit(initFile, frp_server_addr, frp_server_port, frp_token, ssh_port);\n const cmdList = [\n util.format('%s/%s/%s -c %s', execPath, getFullName(version), name, initFile)\n ];\n return new Promise(resolve => {\n ;\n (function () {\n return __awaiter(this, void 0, void 0, function* () {\n for (const item of cmdList) {\n yield exec.exec(item);\n }\n });\n })();\n setTimeout(() => resolve('exec done!'), 1000);\n });\n });\n}\nexports.frp = frp;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst frpc_1 = require(\"./frpc\");\nconst ngrok_1 = require(\"./ngrok\");\nconst sshd_1 = require(\"./sshd\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const frp_server_addr = core.getInput('frp_server_addr');\n const frp_server_port = core.getInput('frp_server_port');\n const frp_token = core.getInput('frp_token');\n const ssh_port = core.getInput('ssh_port');\n const ngrok_token = core.getInput('ngrok_token');\n yield sshd_1.sshd();\n if (frp_token !== '') {\n yield frpc_1.frp(frp_server_addr, frp_server_port, frp_token, ssh_port);\n }\n if (ngrok_token !== '') {\n yield ngrok_1.ngrok(ngrok_token);\n }\n }\n catch (error) {\n core.setFailed(error.message);\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ngrok = void 0;\nconst util = __importStar(require(\"util\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst core = __importStar(require(\"@actions/core\"));\nconst toolCache = __importStar(require(\"@actions/tool-cache\"));\nconst utils_1 = require(\"./utils\");\nconst yaml_1 = __importDefault(require(\"yaml\"));\nconst fs_1 = require(\"fs\");\nconst util_1 = require(\"util\");\nconst writeFileAsync = util_1.promisify(fs_1.writeFile);\nconst name = 'ngrok';\nconst defaultVersion = '4VmDzA7iaHb';\nconst fileSufix = '.zip';\nconst downloadUrlScheme = 'https://bin.equinox.io/c/%s/%s%s';\nfunction getFullName() {\n return util.format('ngrok-stable-%s-amd64', utils_1.getOsType());\n}\nfunction writeTunnel(path, token) {\n return __awaiter(this, void 0, void 0, function* () {\n const config = Object();\n config['authtoken'] = token;\n config['tunnels'] = {\n 'tcp-8000': {\n addr: '8000',\n proto: 'tcp'\n }\n };\n const addr_1 = core.getInput('ngrok_addr_1');\n const proto_1 = core.getInput('ngrok_proto_1');\n const addr_2 = core.getInput('ngrok_addr_2');\n const proto_2 = core.getInput('ngrok_proto_2');\n const addr_3 = core.getInput('ngrok_addr_3');\n const proto_3 = core.getInput('ngrok_proto_3');\n if (addr_1 !== '' && proto_1 !== '') {\n const key_1 = util.format('%s-%s', proto_1, addr_1);\n config['tunnels'][key_1] = {\n addr: addr_1,\n proto: proto_1\n };\n }\n if (addr_2 !== '' && proto_2 !== '') {\n const key_2 = util.format('%s-%s', proto_2, addr_2);\n config['tunnels'][key_2] = {\n addr: addr_2,\n proto: proto_2\n };\n }\n if (addr_3 !== '' && proto_3 !== '') {\n const key_3 = util.format('%s-%s', proto_3, addr_3);\n config['tunnels'][key_3] = {\n addr: addr_3,\n proto: proto_3\n };\n }\n yield writeFileAsync(path, yaml_1.default.stringify(config));\n return path;\n });\n}\nfunction getExecPath(version) {\n return __awaiter(this, void 0, void 0, function* () {\n const downloadUrl = util.format(downloadUrlScheme, version, getFullName(), fileSufix);\n const localPath = yield utils_1.downloadCache(downloadUrl, name, version, getFullName() + fileSufix);\n const execPath = yield toolCache.extractZip(localPath);\n return execPath;\n });\n}\nfunction 
ngrok(NGROK_TOKEN) {\n return __awaiter(this, void 0, void 0, function* () {\n const version = defaultVersion;\n const execPath = yield getExecPath(version);\n const cfgFile = util.format('%s/ngrok.cfg', execPath);\n writeTunnel(cfgFile, NGROK_TOKEN);\n const cmdList = [\n util.format('chmod +x %s/ngrok', execPath),\n util.format('%s/ngrok start --all --config %s --log \"stdout\"', execPath, cfgFile)\n ];\n return new Promise(resolve => {\n ;\n (function () {\n return __awaiter(this, void 0, void 0, function* () {\n for (const item of cmdList) {\n yield exec.exec(item);\n }\n });\n })();\n setTimeout(() => resolve('exec done!'), 1000);\n });\n });\n}\nexports.ngrok = ngrok;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.sshd = void 0;\nconst util = __importStar(require(\"util\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst toolCache = __importStar(require(\"@actions/tool-cache\"));\nconst utils_1 = require(\"./utils\");\nconst name = 'sshd';\nconst defaultVersion = '2.5.1';\nconst fileSufix = utils_1.getArchivedExtension();\nconst downloadUrlScheme = 'https://archive.apache.org/dist/mina/sshd/%s/%s%s';\nfunction getFullName(version) {\n return util.format('apache-sshd-%s', version);\n}\nfunction getExecPath(version) {\n return __awaiter(this, void 0, void 0, function* () {\n const downloadUrl = util.format(downloadUrlScheme, version, getFullName(version), fileSufix);\n const localPath = yield utils_1.downloadCache(downloadUrl, name, version, getFullName(version) + fileSufix);\n const execPath = yield toolCache.extractTar(localPath);\n return execPath;\n });\n}\nfunction sshd() {\n return __awaiter(this, void 0, void 0, function* () {\n const version = defaultVersion;\n const execPath = yield getExecPath(version);\n const cmdList = [\n util.format('%s/%s/bin/%s', execPath, getFullName(version), name + utils_1.getBashExtension())\n ];\n return new Promise(resolve => {\n ;\n (function () {\n return __awaiter(this, void 0, void 0, function* () {\n for (const item of cmdList) {\n yield exec.exec(item);\n }\n });\n })();\n setTimeout(() => resolve('exec done!'), 1000);\n });\n });\n}\nexports.sshd = sshd;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.execCmdList = exports.downloadCache = exports.getOsType = exports.getArchivedExtension = exports.getBashExtension = exports.getExecutableExtension = void 0;\nconst os = __importStar(require(\"os\"));\nconst toolCache = __importStar(require(\"@actions/tool-cache\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nfunction getExecutableExtension() {\n if (os.type().match(/^Win/)) {\n return '.exe';\n }\n return '';\n}\nexports.getExecutableExtension = getExecutableExtension;\nfunction getBashExtension() {\n if (os.type().match(/^Win/)) {\n return '.bat';\n }\n return '.sh';\n}\nexports.getBashExtension = getBashExtension;\nfunction getArchivedExtension() {\n if (os.type().match(/^Win/)) {\n return '.zip';\n }\n return '.tar.gz';\n}\nexports.getArchivedExtension = getArchivedExtension;\nfunction getOsType() {\n if (os.type().match(/^Win/)) {\n // Windows_NT\n return 'windows';\n }\n return os.type().toLocaleLowerCase();\n}\nexports.getOsType = getOsType;\nfunction downloadCache(downloadUrl, name, version, fullName) {\n return __awaiter(this, void 0, void 0, function* () {\n let cachePath = toolCache.find(name, version);\n if (!cachePath) {\n try {\n cachePath = yield toolCache.downloadTool(downloadUrl);\n }\n catch (exception) {\n throw new Error('DownloadFailed');\n }\n yield toolCache.cacheFile(cachePath, fullName, name, version);\n }\n return cachePath;\n });\n}\nexports.downloadCache = downloadCache;\nfunction execCmdList(cmdList) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => {\n ;\n (function () {\n return __awaiter(this, void 0, void 0, function* () {\n for (const item of cmdList) {\n yield exec.exec(item);\n }\n });\n })();\n setTimeout(() => resolve('exec done!'), 1000);\n });\n });\n}\nexports.execCmdList = execCmdList;\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n 
}\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. 
Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). 
Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. 
libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. 
All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param 
options optional exec options. See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the 
proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, 
additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout 
eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? 
tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = process.env['no_proxy'] || 
process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst semver = __importStar(require(\"semver\"));\nconst core_1 = require(\"@actions/core\");\n// needs to be require for core node modules to be mocked\n/* eslint @typescript-eslint/no-require-imports: 0 */\nconst os = require(\"os\");\nconst cp = require(\"child_process\");\nconst fs = require(\"fs\");\nfunction _findMatch(versionSpec, stable, candidates, archFilter) {\n return __awaiter(this, void 0, void 0, function* () {\n const platFilter = os.platform();\n let result;\n let match;\n let file;\n for (const candidate of candidates) {\n const version = candidate.version;\n core_1.debug(`check ${version} satisfies ${versionSpec}`);\n if (semver.satisfies(version, versionSpec) &&\n (!stable || candidate.stable === stable)) {\n file = candidate.files.find(item => {\n core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);\n let chk = item.arch === archFilter && item.platform === platFilter;\n if (chk && item.platform_version) {\n const osVersion = module.exports._getOsVersion();\n if (osVersion === item.platform_version) {\n chk = true;\n }\n else {\n chk = semver.satisfies(osVersion, item.platform_version);\n }\n }\n return chk;\n });\n if (file) {\n core_1.debug(`matched ${candidate.version}`);\n match = candidate;\n break;\n }\n }\n }\n if (match && file) {\n // clone since we're mutating the file list to be only the file that matches\n result = Object.assign({}, match);\n result.files = [file];\n }\n return result;\n });\n}\nexports._findMatch = _findMatch;\nfunction _getOsVersion() {\n // TODO: add windows and other linux, arm variants\n // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)\n const plat = os.platform();\n let version = '';\n if (plat === 'darwin') {\n version = cp.execSync('sw_vers -productVersion').toString();\n }\n else if (plat === 'linux') {\n // lsb_release process not in some containers, readfile\n // Run cat /etc/lsb-release\n // DISTRIB_ID=Ubuntu\n // DISTRIB_RELEASE=18.04\n // DISTRIB_CODENAME=bionic\n // DISTRIB_DESCRIPTION=\"Ubuntu 18.04.4 LTS\"\n const lsbContents = module.exports._readLinuxVersionFile();\n if (lsbContents) {\n const lines = lsbContents.split('\\n');\n for (const line of lines) {\n const parts = line.split('=');\n if (parts.length === 2 && parts[0].trim() === 'DISTRIB_RELEASE') {\n version = parts[1].trim();\n break;\n }\n }\n }\n }\n return version;\n}\nexports._getOsVersion = _getOsVersion;\nfunction _readLinuxVersionFile() {\n const lsbFile = '/etc/lsb-release';\n let contents = '';\n if (fs.existsSync(lsbFile)) {\n contents = fs.readFileSync(lsbFile).toString();\n }\n return contents;\n}\nexports._readLinuxVersionFile 
= _readLinuxVersionFile;\n//# sourceMappingURL=manifest.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Internal class for retries\n */\nclass RetryHelper {\n constructor(maxAttempts, minSeconds, maxSeconds) {\n if (maxAttempts < 1) {\n throw new Error('max attempts should be greater than or equal to 1');\n }\n this.maxAttempts = maxAttempts;\n this.minSeconds = Math.floor(minSeconds);\n this.maxSeconds = Math.floor(maxSeconds);\n if (this.minSeconds > this.maxSeconds) {\n throw new Error('min seconds should be less than or equal to max seconds');\n }\n }\n execute(action, isRetryable) {\n return __awaiter(this, void 0, void 0, function* () {\n let attempt = 1;\n while (attempt < this.maxAttempts) {\n // Try\n try {\n return yield action();\n }\n catch (err) {\n if (isRetryable && !isRetryable(err)) {\n throw err;\n }\n core.info(err.message);\n }\n // Sleep\n const seconds = this.getSleepAmount();\n core.info(`Waiting ${seconds} seconds before trying again`);\n yield this.sleep(seconds);\n attempt++;\n }\n // Last attempt\n return yield action();\n });\n }\n getSleepAmount() {\n return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +\n this.minSeconds);\n }\n sleep(seconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, seconds * 1000));\n });\n }\n}\nexports.RetryHelper = RetryHelper;\n//# sourceMappingURL=retry-helper.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst fs = __importStar(require(\"fs\"));\nconst mm = __importStar(require(\"./manifest\"));\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst httpm = __importStar(require(\"@actions/http-client\"));\nconst semver = __importStar(require(\"semver\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst v4_1 = __importDefault(require(\"uuid/v4\"));\nconst exec_1 = require(\"@actions/exec/lib/exec\");\nconst assert_1 = require(\"assert\");\nconst retry_helper_1 = require(\"./retry-helper\");\nclass HTTPError extends Error {\n constructor(httpStatusCode) {\n super(`Unexpected HTTP response: ${httpStatusCode}`);\n this.httpStatusCode = httpStatusCode;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.HTTPError = HTTPError;\nconst IS_WINDOWS = process.platform === 'win32';\nconst IS_MAC = process.platform === 'darwin';\nconst userAgent = 'actions/tool-cache';\n/**\n * Download a tool from an url and stream it into a file\n *\n * @param url url of tool to download\n * @param dest path to download tool\n * @param auth authorization header\n * @returns path to downloaded tool\n */\nfunction downloadTool(url, dest, auth) {\n return __awaiter(this, void 0, void 0, function* () {\n dest = dest || path.join(_getTempDirectory(), v4_1.default());\n yield io.mkdirP(path.dirname(dest));\n core.debug(`Downloading ${url}`);\n core.debug(`Destination ${dest}`);\n const maxAttempts = 3;\n const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);\n const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);\n const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);\n return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {\n return yield downloadToolAttempt(url, dest || '', auth);\n }), (err) => {\n if (err instanceof HTTPError && err.httpStatusCode) {\n // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests\n if (err.httpStatusCode < 500 &&\n err.httpStatusCode !== 408 &&\n err.httpStatusCode !== 429) {\n return false;\n }\n }\n // Otherwise retry\n return true;\n });\n });\n}\nexports.downloadTool = downloadTool;\nfunction downloadToolAttempt(url, dest, auth) {\n return __awaiter(this, void 0, void 0, function* () {\n if (fs.existsSync(dest)) {\n throw new Error(`Destination file path ${dest} already exists`);\n }\n // Get the response headers\n const http = new httpm.HttpClient(userAgent, [], {\n allowRetries: false\n });\n let headers;\n if (auth) {\n core.debug('set auth');\n headers = {\n authorization: auth\n };\n }\n const response = yield http.get(url, headers);\n if (response.message.statusCode !== 200) {\n const err = new HTTPError(response.message.statusCode);\n core.debug(`Failed to download from \"${url}\". 
Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);\n throw err;\n }\n // Download the response body\n const pipeline = util.promisify(stream.pipeline);\n const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);\n const readStream = responseMessageFactory();\n let succeeded = false;\n try {\n yield pipeline(readStream, fs.createWriteStream(dest));\n core.debug('download complete');\n succeeded = true;\n return dest;\n }\n finally {\n // Error, delete dest before retry\n if (!succeeded) {\n core.debug('download failed');\n try {\n yield io.rmRF(dest);\n }\n catch (err) {\n core.debug(`Failed to delete '${dest}'. ${err.message}`);\n }\n }\n }\n });\n}\n/**\n * Extract a .7z file\n *\n * @param file path to the .7z file\n * @param dest destination directory. Optional.\n * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this\n * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will\n * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is\n * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line\n * interface, it is smaller than the full command line interface, and it does support long paths. At the\n * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.\n * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path\n * to 7zr.exe can be pass to this function.\n * @returns path to the destination directory\n */\nfunction extract7z(file, dest, _7zPath) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n const originalCwd = process.cwd();\n process.chdir(dest);\n if (_7zPath) {\n try {\n const logLevel = core.isDebug() ? '-bb1' : '-bb0';\n const args = [\n 'x',\n logLevel,\n '-bd',\n '-sccUTF-8',\n file\n ];\n const options = {\n silent: true\n };\n yield exec_1.exec(`\"${_7zPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n else {\n const escapedScript = path\n .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')\n .replace(/'/g, \"''\")\n .replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const escapedTarget = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n command\n ];\n const options = {\n silent: true\n };\n try {\n const powershellPath = yield io.which('powershell', true);\n yield exec_1.exec(`\"${powershellPath}\"`, args, options);\n }\n finally {\n process.chdir(originalCwd);\n }\n }\n return dest;\n });\n}\nexports.extract7z = extract7z;\n/**\n * Extract a compressed tar archive\n *\n * @param file path to the tar\n * @param dest destination directory. Optional.\n * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). 
Optional.\n * @returns path to the destination directory\n */\nfunction extractTar(file, dest, flags = 'xz') {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n // Create dest\n dest = yield _createExtractFolder(dest);\n // Determine whether GNU tar\n core.debug('Checking tar --version');\n let versionOutput = '';\n yield exec_1.exec('tar --version', [], {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n core.debug(versionOutput.trim());\n const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');\n // Initialize args\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n if (core.isDebug() && !flags.includes('v')) {\n args.push('-v');\n }\n let destArg = dest;\n let fileArg = file;\n if (IS_WINDOWS && isGnuTar) {\n args.push('--force-local');\n destArg = dest.replace(/\\\\/g, '/');\n // Technically only the dest needs to have `/` but for aesthetic consistency\n // convert slashes in the file arg too.\n fileArg = file.replace(/\\\\/g, '/');\n }\n if (isGnuTar) {\n // Suppress warnings when using GNU tar to extract archives created by BSD tar\n args.push('--warning=no-unknown-keyword');\n }\n args.push('-C', destArg, '-f', fileArg);\n yield exec_1.exec(`tar`, args);\n return dest;\n });\n}\nexports.extractTar = extractTar;\n/**\n * Extract a xar compatible archive\n *\n * @param file path to the archive\n * @param dest destination directory. Optional.\n * @param flags flags for the xar. Optional.\n * @returns path to the destination directory\n */\nfunction extractXar(file, dest, flags = []) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');\n assert_1.ok(file, 'parameter \"file\" is required');\n dest = yield _createExtractFolder(dest);\n let args;\n if (flags instanceof Array) {\n args = flags;\n }\n else {\n args = [flags];\n }\n args.push('-x', '-C', dest, '-f', file);\n if (core.isDebug()) {\n args.push('-v');\n }\n const xarPath = yield io.which('xar', true);\n yield exec_1.exec(`\"${xarPath}\"`, _unique(args));\n return dest;\n });\n}\nexports.extractXar = extractXar;\n/**\n * Extract a zip\n *\n * @param file path to the zip\n * @param dest destination directory. 
Optional.\n * @returns path to the destination directory\n */\nfunction extractZip(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!file) {\n throw new Error(\"parameter 'file' is required\");\n }\n dest = yield _createExtractFolder(dest);\n if (IS_WINDOWS) {\n yield extractZipWin(file, dest);\n }\n else {\n yield extractZipNix(file, dest);\n }\n return dest;\n });\n}\nexports.extractZip = extractZip;\nfunction extractZipWin(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n // build the powershell command\n const escapedFile = file.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, ''); // double-up single quotes, remove double quotes and newlines\n const escapedDest = dest.replace(/'/g, \"''\").replace(/\"|\\n|\\r/g, '');\n const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;\n // run powershell\n const powershellPath = yield io.which('powershell', true);\n const args = [\n '-NoLogo',\n '-Sta',\n '-NoProfile',\n '-NonInteractive',\n '-ExecutionPolicy',\n 'Unrestricted',\n '-Command',\n command\n ];\n yield exec_1.exec(`\"${powershellPath}\"`, args);\n });\n}\nfunction extractZipNix(file, dest) {\n return __awaiter(this, void 0, void 0, function* () {\n const unzipPath = yield io.which('unzip', true);\n const args = [file];\n if (!core.isDebug()) {\n args.unshift('-q');\n }\n yield exec_1.exec(`\"${unzipPath}\"`, args, { cwd: dest });\n });\n}\n/**\n * Caches a directory and installs it into the tool cacheDir\n *\n * @param sourceDir the directory to cache into tools\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. Defaults to machine architecture\n */\nfunction cacheDir(sourceDir, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source dir: ${sourceDir}`);\n if (!fs.statSync(sourceDir).isDirectory()) {\n throw new Error('sourceDir is not a directory');\n }\n // Create the tool dir\n const destPath = yield _createToolPath(tool, version, arch);\n // copy each child item. do not move. move can fail on Windows\n // due to anti-virus software having an open handle on a file.\n for (const itemName of fs.readdirSync(sourceDir)) {\n const s = path.join(sourceDir, itemName);\n yield io.cp(s, destPath, { recursive: true });\n }\n // write .complete\n _completeToolPath(tool, version, arch);\n return destPath;\n });\n}\nexports.cacheDir = cacheDir;\n/**\n * Caches a downloaded file (GUID) and installs it\n * into the tool cache with a given targetName\n *\n * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.\n * @param targetFile the name of the file name in the tools directory\n * @param tool tool name\n * @param version version of the tool. semver format\n * @param arch architecture of the tool. Optional. 
Defaults to machine architecture\n */\nfunction cacheFile(sourceFile, targetFile, tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n version = semver.clean(version) || version;\n arch = arch || os.arch();\n core.debug(`Caching tool ${tool} ${version} ${arch}`);\n core.debug(`source file: ${sourceFile}`);\n if (!fs.statSync(sourceFile).isFile()) {\n throw new Error('sourceFile is not a file');\n }\n // create the tool dir\n const destFolder = yield _createToolPath(tool, version, arch);\n // copy instead of move. move can fail on Windows due to\n // anti-virus software having an open handle on a file.\n const destPath = path.join(destFolder, targetFile);\n core.debug(`destination file ${destPath}`);\n yield io.cp(sourceFile, destPath);\n // write .complete\n _completeToolPath(tool, version, arch);\n return destFolder;\n });\n}\nexports.cacheFile = cacheFile;\n/**\n * Finds the path to a tool version in the local installed tool cache\n *\n * @param toolName name of the tool\n * @param versionSpec version of the tool\n * @param arch optional arch. defaults to arch of computer\n */\nfunction find(toolName, versionSpec, arch) {\n if (!toolName) {\n throw new Error('toolName parameter is required');\n }\n if (!versionSpec) {\n throw new Error('versionSpec parameter is required');\n }\n arch = arch || os.arch();\n // attempt to resolve an explicit version\n if (!_isExplicitVersion(versionSpec)) {\n const localVersions = findAllVersions(toolName, arch);\n const match = _evaluateVersions(localVersions, versionSpec);\n versionSpec = match;\n }\n // check for the explicit version in the cache\n let toolPath = '';\n if (versionSpec) {\n versionSpec = semver.clean(versionSpec) || '';\n const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);\n core.debug(`checking cache: ${cachePath}`);\n if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {\n core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);\n toolPath = cachePath;\n }\n else {\n core.debug('not found');\n }\n }\n return toolPath;\n}\nexports.find = find;\n/**\n * Finds the paths to all versions of a tool that are installed in the local tool cache\n *\n * @param toolName name of the tool\n * @param arch optional arch. 
defaults to arch of computer\n */\nfunction findAllVersions(toolName, arch) {\n const versions = [];\n arch = arch || os.arch();\n const toolPath = path.join(_getCacheDirectory(), toolName);\n if (fs.existsSync(toolPath)) {\n const children = fs.readdirSync(toolPath);\n for (const child of children) {\n if (_isExplicitVersion(child)) {\n const fullPath = path.join(toolPath, child, arch || '');\n if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {\n versions.push(child);\n }\n }\n }\n }\n return versions;\n}\nexports.findAllVersions = findAllVersions;\nfunction getManifestFromRepo(owner, repo, auth, branch = 'master') {\n return __awaiter(this, void 0, void 0, function* () {\n let releases = [];\n const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;\n const http = new httpm.HttpClient('tool-cache');\n const headers = {};\n if (auth) {\n core.debug('set auth');\n headers.authorization = auth;\n }\n const response = yield http.getJson(treeUrl, headers);\n if (!response.result) {\n return releases;\n }\n let manifestUrl = '';\n for (const item of response.result.tree) {\n if (item.path === 'versions-manifest.json') {\n manifestUrl = item.url;\n break;\n }\n }\n headers['accept'] = 'application/vnd.github.VERSION.raw';\n let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();\n if (versionsRaw) {\n // shouldn't be needed but protects against invalid json saved with BOM\n versionsRaw = versionsRaw.replace(/^\\uFEFF/, '');\n try {\n releases = JSON.parse(versionsRaw);\n }\n catch (_a) {\n core.debug('Invalid json');\n }\n }\n return releases;\n });\n}\nexports.getManifestFromRepo = getManifestFromRepo;\nfunction findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {\n return __awaiter(this, void 0, void 0, function* () {\n // wrap the internal impl\n const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);\n return match;\n });\n}\nexports.findFromManifest = findFromManifest;\nfunction _createExtractFolder(dest) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!dest) {\n // create a temp dir\n dest = path.join(_getTempDirectory(), v4_1.default());\n }\n yield io.mkdirP(dest);\n return dest;\n });\n}\nfunction _createToolPath(tool, version, arch) {\n return __awaiter(this, void 0, void 0, function* () {\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n core.debug(`destination ${folderPath}`);\n const markerPath = `${folderPath}.complete`;\n yield io.rmRF(folderPath);\n yield io.rmRF(markerPath);\n yield io.mkdirP(folderPath);\n return folderPath;\n });\n}\nfunction _completeToolPath(tool, version, arch) {\n const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');\n const markerPath = `${folderPath}.complete`;\n fs.writeFileSync(markerPath, '');\n core.debug('finished caching tool');\n}\nfunction _isExplicitVersion(versionSpec) {\n const c = semver.clean(versionSpec) || '';\n core.debug(`isExplicit: ${c}`);\n const valid = semver.valid(c) != null;\n core.debug(`explicit? 
${valid}`);\n return valid;\n}\nfunction _evaluateVersions(versions, versionSpec) {\n let version = '';\n core.debug(`evaluating ${versions.length} versions`);\n versions = versions.sort((a, b) => {\n if (semver.gt(a, b)) {\n return 1;\n }\n return -1;\n });\n for (let i = versions.length - 1; i >= 0; i--) {\n const potential = versions[i];\n const satisfied = semver.satisfies(potential, versionSpec);\n if (satisfied) {\n version = potential;\n break;\n }\n }\n if (version) {\n core.debug(`matched: ${version}`);\n }\n else {\n core.debug('match not found');\n }\n return version;\n}\n/**\n * Gets RUNNER_TOOL_CACHE\n */\nfunction _getCacheDirectory() {\n const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';\n assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');\n return cacheDirectory;\n}\n/**\n * Gets RUNNER_TEMP\n */\nfunction _getTempDirectory() {\n const tempDirectory = process.env['RUNNER_TEMP'] || '';\n assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');\n return tempDirectory;\n}\n/**\n * Gets a global variable\n */\nfunction _getGlobal(key, defaultValue) {\n /* eslint-disable @typescript-eslint/no-explicit-any */\n const value = global[key];\n /* eslint-enable @typescript-eslint/no-explicit-any */\n return value !== undefined ? value : defaultValue;\n}\n/**\n * Returns an array of unique values.\n * @param values Values to make unique.\n */\nfunction _unique(values) {\n return Array.from(new Set(values));\n}\n//# sourceMappingURL=tool-cache.js.map","exports = module.exports = SemVer\n\nvar debug\n/* istanbul ignore next */\nif (typeof process === 'object' &&\n process.env &&\n process.env.NODE_DEBUG &&\n /\\bsemver\\b/i.test(process.env.NODE_DEBUG)) {\n debug = function () {\n var args = Array.prototype.slice.call(arguments, 0)\n args.unshift('SEMVER')\n console.log.apply(console, args)\n }\n} else {\n debug = function () {}\n}\n\n// Note: this is the semver.org version of the spec that it implements\n// Not necessarily the package version of this code.\nexports.SEMVER_SPEC_VERSION = '2.0.0'\n\nvar MAX_LENGTH = 256\nvar MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||\n /* istanbul ignore next */ 9007199254740991\n\n// Max safe segment length for coercion.\nvar MAX_SAFE_COMPONENT_LENGTH = 16\n\n// The actual regexps go on exports.re\nvar re = exports.re = []\nvar src = exports.src = []\nvar t = exports.tokens = {}\nvar R = 0\n\nfunction tok (n) {\n t[n] = R++\n}\n\n// The following Regular Expressions can be used for tokenizing,\n// validating, and parsing SemVer version strings.\n\n// ## Numeric Identifier\n// A single `0`, or a non-zero digit followed by zero or more digits.\n\ntok('NUMERICIDENTIFIER')\nsrc[t.NUMERICIDENTIFIER] = '0|[1-9]\\\\d*'\ntok('NUMERICIDENTIFIERLOOSE')\nsrc[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'\n\n// ## Non-numeric Identifier\n// Zero or more digits, followed by a letter or hyphen, and then zero or\n// more letters, digits, or hyphens.\n\ntok('NONNUMERICIDENTIFIER')\nsrc[t.NONNUMERICIDENTIFIER] = '\\\\d*[a-zA-Z-][a-zA-Z0-9-]*'\n\n// ## Main Version\n// Three dot-separated numeric identifiers.\n\ntok('MAINVERSION')\nsrc[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIER] + ')'\n\ntok('MAINVERSIONLOOSE')\nsrc[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' +\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\\\.' 
+\n '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'\n\n// ## Pre-release Version Identifier\n// A numeric identifier, or a non-numeric identifier.\n\ntok('PRERELEASEIDENTIFIER')\nsrc[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\ntok('PRERELEASEIDENTIFIERLOOSE')\nsrc[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +\n '|' + src[t.NONNUMERICIDENTIFIER] + ')'\n\n// ## Pre-release Version\n// Hyphen, followed by one or more dot-separated pre-release version\n// identifiers.\n\ntok('PRERELEASE')\nsrc[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'\n\ntok('PRERELEASELOOSE')\nsrc[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +\n '(?:\\\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'\n\n// ## Build Metadata Identifier\n// Any combination of digits, letters, or hyphens.\n\ntok('BUILDIDENTIFIER')\nsrc[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'\n\n// ## Build Metadata\n// Plus sign, followed by one or more period-separated build metadata\n// identifiers.\n\ntok('BUILD')\nsrc[t.BUILD] = '(?:\\\\+(' + src[t.BUILDIDENTIFIER] +\n '(?:\\\\.' + src[t.BUILDIDENTIFIER] + ')*))'\n\n// ## Full Version String\n// A main version, followed optionally by a pre-release version and\n// build metadata.\n\n// Note that the only major, minor, patch, and pre-release sections of\n// the version string are capturing groups. The build metadata is not a\n// capturing group, because it should not ever be used in version\n// comparison.\n\ntok('FULL')\ntok('FULLPLAIN')\nsrc[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +\n src[t.PRERELEASE] + '?' +\n src[t.BUILD] + '?'\n\nsrc[t.FULL] = '^' + src[t.FULLPLAIN] + '$'\n\n// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.\n// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty\n// common in the npm registry.\ntok('LOOSEPLAIN')\nsrc[t.LOOSEPLAIN] = '[v=\\\\s]*' + src[t.MAINVERSIONLOOSE] +\n src[t.PRERELEASELOOSE] + '?' +\n src[t.BUILD] + '?'\n\ntok('LOOSE')\nsrc[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'\n\ntok('GTLT')\nsrc[t.GTLT] = '((?:<|>)?=?)'\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\ntok('XRANGEIDENTIFIERLOOSE')\nsrc[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\\\*'\ntok('XRANGEIDENTIFIER')\nsrc[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\\\*'\n\ntok('XRANGEPLAIN')\nsrc[t.XRANGEPLAIN] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIER] + ')' +\n '(?:' + src[t.PRERELEASE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGEPLAINLOOSE')\nsrc[t.XRANGEPLAINLOOSE] = '[v=\\\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:' + src[t.PRERELEASELOOSE] + ')?' +\n src[t.BUILD] + '?' +\n ')?)?'\n\ntok('XRANGE')\nsrc[t.XRANGE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAIN] + '$'\ntok('XRANGELOOSE')\nsrc[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\\\s*' + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Coercion.\n// Extract anything that could conceivably be a part of a valid semver\ntok('COERCE')\nsrc[t.COERCE] = '(^|[^\\\\d])' +\n '(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+\n '(?:\\\\.(\\\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +\n '(?:$|[^\\\\d])'\ntok('COERCERTL')\nre[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\ntok('LONETILDE')\nsrc[t.LONETILDE] = '(?:~>?)'\n\ntok('TILDETRIM')\nsrc[t.TILDETRIM] = '(\\\\s*)' + src[t.LONETILDE] + '\\\\s+'\nre[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')\nvar tildeTrimReplace = '$1~'\n\ntok('TILDE')\nsrc[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'\ntok('TILDELOOSE')\nsrc[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\ntok('LONECARET')\nsrc[t.LONECARET] = '(?:\\\\^)'\n\ntok('CARETTRIM')\nsrc[t.CARETTRIM] = '(\\\\s*)' + src[t.LONECARET] + '\\\\s+'\nre[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')\nvar caretTrimReplace = '$1^'\n\ntok('CARET')\nsrc[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'\ntok('CARETLOOSE')\nsrc[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\ntok('COMPARATORLOOSE')\nsrc[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'\ntok('COMPARATOR')\nsrc[t.COMPARATOR] = '^' + src[t.GTLT] + '\\\\s*(' + src[t.FULLPLAIN] + ')$|^$'\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\ntok('COMPARATORTRIM')\nsrc[t.COMPARATORTRIM] = '(\\\\s*)' + src[t.GTLT] +\n '\\\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'\n\n// this one has to use the /g flag\nre[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')\nvar comparatorTrimReplace = '$1$2$3'\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\ntok('HYPHENRANGE')\nsrc[t.HYPHENRANGE] = '^\\\\s*(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAIN] + ')' +\n '\\\\s*$'\n\ntok('HYPHENRANGELOOSE')\nsrc[t.HYPHENRANGELOOSE] = '^\\\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[t.XRANGEPLAINLOOSE] + ')' +\n '\\\\s*$'\n\n// Star ranges basically just allow anything at all.\ntok('STAR')\nsrc[t.STAR] = '(<|>)?=?\\\\s*\\\\*'\n\n// Compile to actual regexp objects.\n// All are flag-free, unless they were created above with a flag.\nfor (var i = 0; i < R; i++) {\n debug(i, src[i])\n if (!re[i]) {\n re[i] = new RegExp(src[i])\n }\n}\n\nexports.parse = parse\nfunction parse (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n if (version.length > MAX_LENGTH) {\n return null\n }\n\n var r = options.loose ? re[t.LOOSE] : re[t.FULL]\n if (!r.test(version)) {\n return null\n }\n\n try {\n return new SemVer(version, options)\n } catch (er) {\n return null\n }\n}\n\nexports.valid = valid\nfunction valid (version, options) {\n var v = parse(version, options)\n return v ? v.version : null\n}\n\nexports.clean = clean\nfunction clean (version, options) {\n var s = parse(version.trim().replace(/^[=v]+/, ''), options)\n return s ? 
s.version : null\n}\n\nexports.SemVer = SemVer\n\nfunction SemVer (version, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n if (version instanceof SemVer) {\n if (version.loose === options.loose) {\n return version\n } else {\n version = version.version\n }\n } else if (typeof version !== 'string') {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n if (version.length > MAX_LENGTH) {\n throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')\n }\n\n if (!(this instanceof SemVer)) {\n return new SemVer(version, options)\n }\n\n debug('SemVer', version, options)\n this.options = options\n this.loose = !!options.loose\n\n var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])\n\n if (!m) {\n throw new TypeError('Invalid Version: ' + version)\n }\n\n this.raw = version\n\n // these are actually numbers\n this.major = +m[1]\n this.minor = +m[2]\n this.patch = +m[3]\n\n if (this.major > MAX_SAFE_INTEGER || this.major < 0) {\n throw new TypeError('Invalid major version')\n }\n\n if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {\n throw new TypeError('Invalid minor version')\n }\n\n if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {\n throw new TypeError('Invalid patch version')\n }\n\n // numberify any prerelease numeric ids\n if (!m[4]) {\n this.prerelease = []\n } else {\n this.prerelease = m[4].split('.').map(function (id) {\n if (/^[0-9]+$/.test(id)) {\n var num = +id\n if (num >= 0 && num < MAX_SAFE_INTEGER) {\n return num\n }\n }\n return id\n })\n }\n\n this.build = m[5] ? m[5].split('.') : []\n this.format()\n}\n\nSemVer.prototype.format = function () {\n this.version = this.major + '.' + this.minor + '.' + this.patch\n if (this.prerelease.length) {\n this.version += '-' + this.prerelease.join('.')\n }\n return this.version\n}\n\nSemVer.prototype.toString = function () {\n return this.version\n}\n\nSemVer.prototype.compare = function (other) {\n debug('SemVer.compare', this.version, this.options, other)\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return this.compareMain(other) || this.comparePre(other)\n}\n\nSemVer.prototype.compareMain = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n return compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch)\n}\n\nSemVer.prototype.comparePre = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length) {\n return -1\n } else if (!this.prerelease.length && other.prerelease.length) {\n return 1\n } else if (!this.prerelease.length && !other.prerelease.length) {\n return 0\n }\n\n var i = 0\n do {\n var a = this.prerelease[i]\n var b = other.prerelease[i]\n debug('prerelease compare', i, a, b)\n if (a === undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\nSemVer.prototype.compareBuild = function (other) {\n if (!(other instanceof SemVer)) {\n other = new SemVer(other, this.options)\n }\n\n var i = 0\n do {\n var a = this.build[i]\n var b = other.build[i]\n debug('prerelease compare', i, a, b)\n if (a === 
undefined && b === undefined) {\n return 0\n } else if (b === undefined) {\n return 1\n } else if (a === undefined) {\n return -1\n } else if (a === b) {\n continue\n } else {\n return compareIdentifiers(a, b)\n }\n } while (++i)\n}\n\n// preminor will bump the version up to the next minor release, and immediately\n// down to pre-release. premajor and prepatch work the same way.\nSemVer.prototype.inc = function (release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor = 0\n this.major++\n this.inc('pre', identifier)\n break\n case 'preminor':\n this.prerelease.length = 0\n this.patch = 0\n this.minor++\n this.inc('pre', identifier)\n break\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0\n this.inc('patch', identifier)\n this.inc('pre', identifier)\n break\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0) {\n this.inc('patch', identifier)\n }\n this.inc('pre', identifier)\n break\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (this.minor !== 0 ||\n this.patch !== 0 ||\n this.prerelease.length === 0) {\n this.major++\n }\n this.minor = 0\n this.patch = 0\n this.prerelease = []\n break\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0) {\n this.minor++\n }\n this.patch = 0\n this.prerelease = []\n break\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0) {\n this.patch++\n }\n this.prerelease = []\n break\n // This probably shouldn't be used publicly.\n // 1.0.0 \"pre\" would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0) {\n this.prerelease = [0]\n } else {\n var i = this.prerelease.length\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++\n i = -2\n }\n }\n if (i === -1) {\n // didn't increment anything\n this.prerelease.push(0)\n }\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (this.prerelease[0] === identifier) {\n if (isNaN(this.prerelease[1])) {\n this.prerelease = [identifier, 0]\n }\n } else {\n this.prerelease = [identifier, 0]\n }\n }\n break\n\n default:\n throw new Error('invalid increment argument: ' + release)\n }\n this.format()\n this.raw = this.version\n return this\n}\n\nexports.inc = inc\nfunction inc (version, release, loose, identifier) {\n if (typeof (loose) === 'string') {\n identifier = loose\n loose = undefined\n }\n\n try {\n return new SemVer(version, loose).inc(release, identifier).version\n } catch (er) {\n return null\n }\n}\n\nexports.diff = diff\nfunction diff (version1, version2) {\n if (eq(version1, version2)) {\n return null\n } else {\n var v1 = parse(version1)\n var v2 = parse(version2)\n var prefix = ''\n if (v1.prerelease.length || v2.prerelease.length) {\n 
prefix = 'pre'\n var defaultResult = 'prerelease'\n }\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return prefix + key\n }\n }\n }\n return defaultResult // may be undefined\n }\n}\n\nexports.compareIdentifiers = compareIdentifiers\n\nvar numeric = /^[0-9]+$/\nfunction compareIdentifiers (a, b) {\n var anum = numeric.test(a)\n var bnum = numeric.test(b)\n\n if (anum && bnum) {\n a = +a\n b = +b\n }\n\n return a === b ? 0\n : (anum && !bnum) ? -1\n : (bnum && !anum) ? 1\n : a < b ? -1\n : 1\n}\n\nexports.rcompareIdentifiers = rcompareIdentifiers\nfunction rcompareIdentifiers (a, b) {\n return compareIdentifiers(b, a)\n}\n\nexports.major = major\nfunction major (a, loose) {\n return new SemVer(a, loose).major\n}\n\nexports.minor = minor\nfunction minor (a, loose) {\n return new SemVer(a, loose).minor\n}\n\nexports.patch = patch\nfunction patch (a, loose) {\n return new SemVer(a, loose).patch\n}\n\nexports.compare = compare\nfunction compare (a, b, loose) {\n return new SemVer(a, loose).compare(new SemVer(b, loose))\n}\n\nexports.compareLoose = compareLoose\nfunction compareLoose (a, b) {\n return compare(a, b, true)\n}\n\nexports.compareBuild = compareBuild\nfunction compareBuild (a, b, loose) {\n var versionA = new SemVer(a, loose)\n var versionB = new SemVer(b, loose)\n return versionA.compare(versionB) || versionA.compareBuild(versionB)\n}\n\nexports.rcompare = rcompare\nfunction rcompare (a, b, loose) {\n return compare(b, a, loose)\n}\n\nexports.sort = sort\nfunction sort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(a, b, loose)\n })\n}\n\nexports.rsort = rsort\nfunction rsort (list, loose) {\n return list.sort(function (a, b) {\n return exports.compareBuild(b, a, loose)\n })\n}\n\nexports.gt = gt\nfunction gt (a, b, loose) {\n return compare(a, b, loose) > 0\n}\n\nexports.lt = lt\nfunction lt (a, b, loose) {\n return compare(a, b, loose) < 0\n}\n\nexports.eq = eq\nfunction eq (a, b, loose) {\n return compare(a, b, loose) === 0\n}\n\nexports.neq = neq\nfunction neq (a, b, loose) {\n return compare(a, b, loose) !== 0\n}\n\nexports.gte = gte\nfunction gte (a, b, loose) {\n return compare(a, b, loose) >= 0\n}\n\nexports.lte = lte\nfunction lte (a, b, loose) {\n return compare(a, b, loose) <= 0\n}\n\nexports.cmp = cmp\nfunction cmp (a, op, b, loose) {\n switch (op) {\n case '===':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a === b\n\n case '!==':\n if (typeof a === 'object')\n a = a.version\n if (typeof b === 'object')\n b = b.version\n return a !== b\n\n case '':\n case '=':\n case '==':\n return eq(a, b, loose)\n\n case '!=':\n return neq(a, b, loose)\n\n case '>':\n return gt(a, b, loose)\n\n case '>=':\n return gte(a, b, loose)\n\n case '<':\n return lt(a, b, loose)\n\n case '<=':\n return lte(a, b, loose)\n\n default:\n throw new TypeError('Invalid operator: ' + op)\n }\n}\n\nexports.Comparator = Comparator\nfunction Comparator (comp, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (comp instanceof Comparator) {\n if (comp.loose === !!options.loose) {\n return comp\n } else {\n comp = comp.value\n }\n }\n\n if (!(this instanceof Comparator)) {\n return new Comparator(comp, options)\n }\n\n debug('comparator', comp, options)\n this.options = options\n this.loose = !!options.loose\n this.parse(comp)\n\n if (this.semver === ANY) {\n 
this.value = ''\n } else {\n this.value = this.operator + this.semver.version\n }\n\n debug('comp', this)\n}\n\nvar ANY = {}\nComparator.prototype.parse = function (comp) {\n var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var m = comp.match(r)\n\n if (!m) {\n throw new TypeError('Invalid comparator: ' + comp)\n }\n\n this.operator = m[1] !== undefined ? m[1] : ''\n if (this.operator === '=') {\n this.operator = ''\n }\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2]) {\n this.semver = ANY\n } else {\n this.semver = new SemVer(m[2], this.options.loose)\n }\n}\n\nComparator.prototype.toString = function () {\n return this.value\n}\n\nComparator.prototype.test = function (version) {\n debug('Comparator.test', version, this.options.loose)\n\n if (this.semver === ANY || version === ANY) {\n return true\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n return cmp(version, this.operator, this.semver, this.options)\n}\n\nComparator.prototype.intersects = function (comp, options) {\n if (!(comp instanceof Comparator)) {\n throw new TypeError('a Comparator is required')\n }\n\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n var rangeTmp\n\n if (this.operator === '') {\n if (this.value === '') {\n return true\n }\n rangeTmp = new Range(comp.value, options)\n return satisfies(this.value, rangeTmp, options)\n } else if (comp.operator === '') {\n if (comp.value === '') {\n return true\n }\n rangeTmp = new Range(this.value, options)\n return satisfies(comp.semver, rangeTmp, options)\n }\n\n var sameDirectionIncreasing =\n (this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '>=' || comp.operator === '>')\n var sameDirectionDecreasing =\n (this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '<=' || comp.operator === '<')\n var sameSemVer = this.semver.version === comp.semver.version\n var differentDirectionsInclusive =\n (this.operator === '>=' || this.operator === '<=') &&\n (comp.operator === '>=' || comp.operator === '<=')\n var oppositeDirectionsLessThan =\n cmp(this.semver, '<', comp.semver, options) &&\n ((this.operator === '>=' || this.operator === '>') &&\n (comp.operator === '<=' || comp.operator === '<'))\n var oppositeDirectionsGreaterThan =\n cmp(this.semver, '>', comp.semver, options) &&\n ((this.operator === '<=' || this.operator === '<') &&\n (comp.operator === '>=' || comp.operator === '>'))\n\n return sameDirectionIncreasing || sameDirectionDecreasing ||\n (sameSemVer && differentDirectionsInclusive) ||\n oppositeDirectionsLessThan || oppositeDirectionsGreaterThan\n}\n\nexports.Range = Range\nfunction Range (range, options) {\n if (!options || typeof options !== 'object') {\n options = {\n loose: !!options,\n includePrerelease: false\n }\n }\n\n if (range instanceof Range) {\n if (range.loose === !!options.loose &&\n range.includePrerelease === !!options.includePrerelease) {\n return range\n } else {\n return new Range(range.raw, options)\n }\n }\n\n if (range instanceof Comparator) {\n return new Range(range.value, options)\n }\n\n if (!(this instanceof Range)) {\n return new Range(range, options)\n }\n\n this.options = options\n this.loose = !!options.loose\n this.includePrerelease = !!options.includePrerelease\n\n // First, split based on boolean or ||\n this.raw = range\n this.set = range.split(/\\s*\\|\\|\\s*/).map(function (range) 
{\n return this.parseRange(range.trim())\n }, this).filter(function (c) {\n // throw out any that are not relevant for whatever reason\n return c.length\n })\n\n if (!this.set.length) {\n throw new TypeError('Invalid SemVer Range: ' + range)\n }\n\n this.format()\n}\n\nRange.prototype.format = function () {\n this.range = this.set.map(function (comps) {\n return comps.join(' ').trim()\n }).join('||').trim()\n return this.range\n}\n\nRange.prototype.toString = function () {\n return this.range\n}\n\nRange.prototype.parseRange = function (range) {\n var loose = this.options.loose\n range = range.trim()\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]\n range = range.replace(hr, hyphenReplace)\n debug('hyphen replace', range)\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)\n debug('comparator trim', range, re[t.COMPARATORTRIM])\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[t.TILDETRIM], tildeTrimReplace)\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[t.CARETTRIM], caretTrimReplace)\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ')\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]\n var set = range.split(' ').map(function (comp) {\n return parseComparator(comp, this.options)\n }, this).join(' ').split(/\\s+/)\n if (this.options.loose) {\n // in loose mode, throw out any that are not valid comparators\n set = set.filter(function (comp) {\n return !!comp.match(compRe)\n })\n }\n set = set.map(function (comp) {\n return new Comparator(comp, this.options)\n }, this)\n\n return set\n}\n\nRange.prototype.intersects = function (range, options) {\n if (!(range instanceof Range)) {\n throw new TypeError('a Range is required')\n }\n\n return this.set.some(function (thisComparators) {\n return (\n isSatisfiable(thisComparators, options) &&\n range.set.some(function (rangeComparators) {\n return (\n isSatisfiable(rangeComparators, options) &&\n thisComparators.every(function (thisComparator) {\n return rangeComparators.every(function (rangeComparator) {\n return thisComparator.intersects(rangeComparator, options)\n })\n })\n )\n })\n )\n })\n}\n\n// take a set of comparators and determine whether there\n// exists a version which can satisfy it\nfunction isSatisfiable (comparators, options) {\n var result = true\n var remainingComparators = comparators.slice()\n var testComparator = remainingComparators.pop()\n\n while (result && remainingComparators.length) {\n result = remainingComparators.every(function (otherComparator) {\n return testComparator.intersects(otherComparator, options)\n })\n\n testComparator = remainingComparators.pop()\n }\n\n return result\n}\n\n// Mostly just for testing and legacy API reasons\nexports.toComparators = toComparators\nfunction toComparators (range, options) {\n return new Range(range, options).set.map(function (comp) {\n return comp.map(function (c) {\n return c.value\n }).join(' ').trim().split(' ')\n })\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nfunction parseComparator (comp, options) {\n debug('comp', comp, options)\n comp = replaceCarets(comp, options)\n debug('caret', comp)\n comp = replaceTildes(comp, options)\n debug('tildes', comp)\n comp = replaceXRanges(comp, options)\n debug('xrange', 
comp)\n comp = replaceStars(comp, options)\n debug('stars', comp)\n return comp\n}\n\nfunction isX (id) {\n return !id || id.toLowerCase() === 'x' || id === '*'\n}\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0\nfunction replaceTildes (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceTilde(comp, options)\n }).join(' ')\n}\n\nfunction replaceTilde (comp, options) {\n var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('tilde', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n // ~1.2 == >=1.2.0 <1.3.0\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else if (pr) {\n debug('replaceTilde pr', pr)\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n } else {\n // ~1.2.3 == >=1.2.3 <1.3.0\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n\n debug('tilde return', ret)\n return ret\n })\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0\n// ^1.2.3 --> >=1.2.3 <2.0.0\n// ^1.2.0 --> >=1.2.0 <2.0.0\nfunction replaceCarets (comp, options) {\n return comp.trim().split(/\\s+/).map(function (comp) {\n return replaceCaret(comp, options)\n }).join(' ')\n}\n\nfunction replaceCaret (comp, options) {\n debug('caret', comp, options)\n var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]\n return comp.replace(r, function (_, M, m, p, pr) {\n debug('caret', comp, _, M, m, p, pr)\n var ret\n\n if (isX(M)) {\n ret = ''\n } else if (isX(m)) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'\n } else if (isX(p)) {\n if (M === '0') {\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'\n } else {\n ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'\n }\n } else if (pr) {\n debug('replaceCaret pr', pr)\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p + '-' + pr +\n ' <' + (+M + 1) + '.0.0'\n }\n } else {\n debug('no pr')\n if (M === '0') {\n if (m === '0') {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + m + '.' + (+p + 1)\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0'\n }\n } else {\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + (+M + 1) + '.0.0'\n }\n }\n\n debug('caret return', ret)\n return ret\n })\n}\n\nfunction replaceXRanges (comp, options) {\n debug('replaceXRanges', comp, options)\n return comp.split(/\\s+/).map(function (comp) {\n return replaceXRange(comp, options)\n }).join(' ')\n}\n\nfunction replaceXRange (comp, options) {\n comp = comp.trim()\n var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE]\n return comp.replace(r, function (ret, gtlt, M, m, p, pr) {\n debug('xRange', comp, ret, gtlt, M, m, p, pr)\n var xM = isX(M)\n var xm = xM || isX(m)\n var xp = xm || isX(p)\n var anyX = xp\n\n if (gtlt === '=' && anyX) {\n gtlt = ''\n }\n\n // if we're including prereleases in the match, then we need\n // to fix this to -0, the lowest possible prerelease value\n pr = options.includePrerelease ? '-0' : ''\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0-0'\n } else {\n // nothing is forbidden\n ret = '*'\n }\n } else if (gtlt && anyX) {\n // we know patch is an x, because we have any x at all.\n // replace X with 0\n if (xm) {\n m = 0\n }\n p = 0\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n // >1.2.3 => >= 1.2.4\n gtlt = '>='\n if (xm) {\n M = +M + 1\n m = 0\n p = 0\n } else {\n m = +m + 1\n p = 0\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm) {\n M = +M + 1\n } else {\n m = +m + 1\n }\n }\n\n ret = gtlt + M + '.' + m + '.' + p + pr\n } else if (xm) {\n ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr\n } else if (xp) {\n ret = '>=' + M + '.' + m + '.0' + pr +\n ' <' + M + '.' + (+m + 1) + '.0' + pr\n }\n\n debug('xRange return', ret)\n\n return ret\n })\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nfunction replaceStars (comp, options) {\n debug('replaceStars', comp, options)\n // Looseness is ignored here. star is always as loose as it gets!\n return comp.trim().replace(re[t.STAR], '')\n}\n\n// This function is passed to string.replace(re[t.HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0\nfunction hyphenReplace ($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) {\n if (isX(fM)) {\n from = ''\n } else if (isX(fm)) {\n from = '>=' + fM + '.0.0'\n } else if (isX(fp)) {\n from = '>=' + fM + '.' + fm + '.0'\n } else {\n from = '>=' + from\n }\n\n if (isX(tM)) {\n to = ''\n } else if (isX(tm)) {\n to = '<' + (+tM + 1) + '.0.0'\n } else if (isX(tp)) {\n to = '<' + tM + '.' + (+tm + 1) + '.0'\n } else if (tpr) {\n to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr\n } else {\n to = '<=' + to\n }\n\n return (from + ' ' + to).trim()\n}\n\n// if ANY of the sets match ALL of its comparators, then pass\nRange.prototype.test = function (version) {\n if (!version) {\n return false\n }\n\n if (typeof version === 'string') {\n try {\n version = new SemVer(version, this.options)\n } catch (er) {\n return false\n }\n }\n\n for (var i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version, this.options)) {\n return true\n }\n }\n return false\n}\n\nfunction testSet (set, version, options) {\n for (var i = 0; i < set.length; i++) {\n if (!set[i].test(version)) {\n return false\n }\n }\n\n if (version.prerelease.length && !options.includePrerelease) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (i = 0; i < set.length; i++) {\n debug(set[i].semver)\n if (set[i].semver === ANY) {\n continue\n }\n\n if (set[i].semver.prerelease.length > 0) {\n var allowed = set[i].semver\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch) {\n return true\n }\n }\n }\n\n // Version has a -pre, but it's not one of the ones we like.\n return false\n }\n\n return true\n}\n\nexports.satisfies = satisfies\nfunction satisfies (version, range, options) {\n try {\n range = new Range(range, options)\n } catch (er) {\n return false\n }\n return range.test(version)\n}\n\nexports.maxSatisfying = maxSatisfying\nfunction maxSatisfying (versions, range, options) {\n var max = null\n var maxSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!max || maxSV.compare(v) === -1) {\n // compare(max, v, true)\n max = v\n maxSV = new SemVer(max, options)\n }\n }\n })\n return max\n}\n\nexports.minSatisfying = minSatisfying\nfunction minSatisfying (versions, range, options) {\n var min = null\n var minSV = null\n try {\n var rangeObj = new Range(range, options)\n } catch (er) {\n return null\n }\n versions.forEach(function (v) {\n if (rangeObj.test(v)) {\n // satisfies(v, range, options)\n if (!min || minSV.compare(v) === 1) {\n // compare(min, v, true)\n min = v\n minSV = new SemVer(min, options)\n }\n }\n })\n return min\n}\n\nexports.minVersion = minVersion\nfunction minVersion (range, loose) {\n range = new Range(range, loose)\n\n var minver = new SemVer('0.0.0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = new SemVer('0.0.0-0')\n if (range.test(minver)) {\n return minver\n }\n\n minver = null\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n comparators.forEach(function (comparator) {\n // Clone to avoid manipulating the comparator's semver object.\n var compver = new SemVer(comparator.semver.version)\n switch (comparator.operator) {\n case '>':\n if (compver.prerelease.length === 0) {\n compver.patch++\n } else {\n compver.prerelease.push(0)\n }\n compver.raw = compver.format()\n /* fallthrough */\n case '':\n case '>=':\n if (!minver || gt(minver, compver)) {\n minver = compver\n }\n break\n case '<':\n case '<=':\n /* Ignore maximum versions */\n break\n /* istanbul ignore next */\n default:\n throw new Error('Unexpected operation: ' + comparator.operator)\n }\n })\n 
}\n\n if (minver && range.test(minver)) {\n return minver\n }\n\n return null\n}\n\nexports.validRange = validRange\nfunction validRange (range, options) {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, options).range || '*'\n } catch (er) {\n return null\n }\n}\n\n// Determine if version is less than all the versions possible in the range\nexports.ltr = ltr\nfunction ltr (version, range, options) {\n return outside(version, range, '<', options)\n}\n\n// Determine if version is greater than all the versions possible in the range.\nexports.gtr = gtr\nfunction gtr (version, range, options) {\n return outside(version, range, '>', options)\n}\n\nexports.outside = outside\nfunction outside (version, range, hilo, options) {\n version = new SemVer(version, options)\n range = new Range(range, options)\n\n var gtfn, ltefn, ltfn, comp, ecomp\n switch (hilo) {\n case '>':\n gtfn = gt\n ltefn = lte\n ltfn = lt\n comp = '>'\n ecomp = '>='\n break\n case '<':\n gtfn = lt\n ltefn = gte\n ltfn = gt\n comp = '<'\n ecomp = '<='\n break\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"')\n }\n\n // If it satisifes the range it is not outside\n if (satisfies(version, range, options)) {\n return false\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i]\n\n var high = null\n var low = null\n\n comparators.forEach(function (comparator) {\n if (comparator.semver === ANY) {\n comparator = new Comparator('>=0.0.0')\n }\n high = high || comparator\n low = low || comparator\n if (gtfn(comparator.semver, high.semver, options)) {\n high = comparator\n } else if (ltfn(comparator.semver, low.semver, options)) {\n low = comparator\n }\n })\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false\n }\n }\n return true\n}\n\nexports.prerelease = prerelease\nfunction prerelease (version, options) {\n var parsed = parse(version, options)\n return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null\n}\n\nexports.intersects = intersects\nfunction intersects (r1, r2, options) {\n r1 = new Range(r1, options)\n r2 = new Range(r2, options)\n return r1.intersects(r2)\n}\n\nexports.coerce = coerce\nfunction coerce (version, options) {\n if (version instanceof SemVer) {\n return version\n }\n\n if (typeof version === 'number') {\n version = String(version)\n }\n\n if (typeof version !== 'string') {\n return null\n }\n\n options = options || {}\n\n var match = null\n if (!options.rtl) {\n match = version.match(re[t.COERCE])\n } else {\n // Find the right-most coercible string that does not share\n // a terminus with a more left-ward coercible string.\n // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'\n //\n // Walk through the string checking with a /g regexp\n // Manually set the index so as to pick up overlapping matches.\n // Stop when we get a match that ends at the string end, since no\n // coercible string can be more right-ward without the same terminus.\n var next\n while ((next = re[t.COERCERTL].exec(version)) &&\n (!match || match.index + match[0].length !== version.length)\n ) {\n if (!match ||\n next.index + next[0].length !== match.index + match[0].length) {\n match = next\n }\n re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length\n }\n // leave it in a clean state\n re[t.COERCERTL].lastIndex = -1\n }\n\n if (match === null) {\n return null\n }\n\n return parse(match[2] +\n '.' + (match[3] || '0') +\n '.' + (match[4] || '0'), options)\n}\n","/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nvar byteToHex = [];\nfor (var i = 0; i < 256; ++i) {\n byteToHex[i] = (i + 0x100).toString(16).substr(1);\n}\n\nfunction bytesToUuid(buf, offset) {\n var i = offset || 0;\n var bth = byteToHex;\n // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4\n return ([\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]], '-',\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]],\n bth[buf[i++]], bth[buf[i++]]\n ]).join('');\n}\n\nmodule.exports = bytesToUuid;\n","// Unique ID creation requires a high quality random # generator. In node.js\n// this is pretty straight-forward - we use the crypto API.\n\nvar crypto = require('crypto');\n\nmodule.exports = function nodeRNG() {\n return crypto.randomBytes(16);\n};\n","var rng = require('./lib/rng');\nvar bytesToUuid = require('./lib/bytesToUuid');\n\nfunction v4(options, buf, offset) {\n var i = buf && offset || 0;\n\n if (typeof(options) == 'string') {\n buf = options === 'binary' ? new Array(16) : null;\n options = null;\n }\n options = options || {};\n\n var rnds = options.random || (options.rng || rng)();\n\n // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n rnds[6] = (rnds[6] & 0x0f) | 0x40;\n rnds[8] = (rnds[8] & 0x3f) | 0x80;\n\n // Copy bytes to buffer, if provided\n if (buf) {\n for (var ii = 0; ii < 16; ++ii) {\n buf[i + ii] = rnds[ii];\n }\n }\n\n return buf || bytesToUuid(rnds);\n}\n\nmodule.exports = v4;\n","const { hasOwnProperty } = Object.prototype\n\nconst eol = typeof process !== 'undefined' &&\n process.platform === 'win32' ? 
'\\r\\n' : '\\n'\n\nconst encode = (obj, opt) => {\n const children = []\n let out = ''\n\n if (typeof opt === 'string') {\n opt = {\n section: opt,\n whitespace: false,\n }\n } else {\n opt = opt || Object.create(null)\n opt.whitespace = opt.whitespace === true\n }\n\n const separator = opt.whitespace ? ' = ' : '='\n\n for (const k of Object.keys(obj)) {\n const val = obj[k]\n if (val && Array.isArray(val)) {\n for (const item of val)\n out += safe(k + '[]') + separator + safe(item) + '\\n'\n } else if (val && typeof val === 'object')\n children.push(k)\n else\n out += safe(k) + separator + safe(val) + eol\n }\n\n if (opt.section && out.length)\n out = '[' + safe(opt.section) + ']' + eol + out\n\n for (const k of children) {\n const nk = dotSplit(k).join('\\\\.')\n const section = (opt.section ? opt.section + '.' : '') + nk\n const { whitespace } = opt\n const child = encode(obj[k], {\n section,\n whitespace,\n })\n if (out.length && child.length)\n out += eol\n\n out += child\n }\n\n return out\n}\n\nconst dotSplit = str =>\n str.replace(/\\1/g, '\\u0002LITERAL\\\\1LITERAL\\u0002')\n .replace(/\\\\\\./g, '\\u0001')\n .split(/\\./)\n .map(part =>\n part.replace(/\\1/g, '\\\\.')\n .replace(/\\2LITERAL\\\\1LITERAL\\2/g, '\\u0001'))\n\nconst decode = str => {\n const out = Object.create(null)\n let p = out\n let section = null\n // section |key = value\n const re = /^\\[([^\\]]*)\\]$|^([^=]+)(=(.*))?$/i\n const lines = str.split(/[\\r\\n]+/g)\n\n for (const line of lines) {\n if (!line || line.match(/^\\s*[;#]/))\n continue\n const match = line.match(re)\n if (!match)\n continue\n if (match[1] !== undefined) {\n section = unsafe(match[1])\n if (section === '__proto__') {\n // not allowed\n // keep parsing the section, but don't attach it.\n p = Object.create(null)\n continue\n }\n p = out[section] = out[section] || Object.create(null)\n continue\n }\n const keyRaw = unsafe(match[2])\n const isArray = keyRaw.length > 2 && keyRaw.slice(-2) === '[]'\n const key = isArray ? keyRaw.slice(0, -2) : keyRaw\n if (key === '__proto__')\n continue\n const valueRaw = match[3] ? unsafe(match[4]) : true\n const value = valueRaw === 'true' ||\n valueRaw === 'false' ||\n valueRaw === 'null' ? 
JSON.parse(valueRaw)\n : valueRaw\n\n // Convert keys with '[]' suffix to an array\n if (isArray) {\n if (!hasOwnProperty.call(p, key))\n p[key] = []\n else if (!Array.isArray(p[key]))\n p[key] = [p[key]]\n }\n\n // safeguard against resetting a previously defined\n // array by accidentally forgetting the brackets\n if (Array.isArray(p[key]))\n p[key].push(value)\n else\n p[key] = value\n }\n\n // {a:{y:1},\"a.b\":{x:2}} --> {a:{y:1,b:{x:2}}}\n // use a filter to return the keys that have to be deleted.\n const remove = []\n for (const k of Object.keys(out)) {\n if (!hasOwnProperty.call(out, k) ||\n typeof out[k] !== 'object' ||\n Array.isArray(out[k]))\n continue\n\n // see if the parent section is also an object.\n // if so, add it to that, and mark this one for deletion\n const parts = dotSplit(k)\n let p = out\n const l = parts.pop()\n const nl = l.replace(/\\\\\\./g, '.')\n for (const part of parts) {\n if (part === '__proto__')\n continue\n if (!hasOwnProperty.call(p, part) || typeof p[part] !== 'object')\n p[part] = Object.create(null)\n p = p[part]\n }\n if (p === out && nl === l)\n continue\n\n p[nl] = out[k]\n remove.push(k)\n }\n for (const del of remove)\n delete out[del]\n\n return out\n}\n\nconst isQuoted = val =>\n (val.charAt(0) === '\"' && val.slice(-1) === '\"') ||\n (val.charAt(0) === \"'\" && val.slice(-1) === \"'\")\n\nconst safe = val =>\n (typeof val !== 'string' ||\n val.match(/[=\\r\\n]/) ||\n val.match(/^\\[/) ||\n (val.length > 1 &&\n isQuoted(val)) ||\n val !== val.trim())\n ? JSON.stringify(val)\n : val.replace(/;/g, '\\\\;').replace(/#/g, '\\\\#')\n\nconst unsafe = (val, doUnesc) => {\n val = (val || '').trim()\n if (isQuoted(val)) {\n // remove the single quotes before calling JSON.parse\n if (val.charAt(0) === \"'\")\n val = val.substr(1, val.length - 2)\n\n try {\n val = JSON.parse(val)\n } catch (_) {}\n } else {\n // walk the val to find the first not-escaped ; character\n let esc = false\n let unesc = ''\n for (let i = 0, l = val.length; i < l; i++) {\n const c = val.charAt(i)\n if (esc) {\n if ('\\\\;#'.indexOf(c) !== -1)\n unesc += c\n else\n unesc += '\\\\' + c\n\n esc = false\n } else if (';#'.indexOf(c) !== -1)\n break\n else if (c === '\\\\')\n esc = true\n else\n unesc += c\n }\n if (esc)\n unesc += '\\\\'\n\n return unesc.trim()\n }\n return val\n}\n\nmodule.exports = {\n parse: decode,\n decode,\n stringify: encode,\n encode,\n safe,\n unsafe,\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction 
TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nvar PlainValue = require('./PlainValue-ec8e588e.js');\nvar resolveSeq = require('./resolveSeq-4a68b39b.js');\nvar Schema = require('./Schema-42e9705c.js');\n\nconst defaultOptions = {\n anchorPrefix: 'a',\n customTags: null,\n indent: 2,\n indentSeq: true,\n keepCstNodes: false,\n keepNodeTypes: true,\n keepBlobsInJSON: true,\n mapAsMap: false,\n maxAliasCount: 100,\n prettyErrors: false,\n // TODO Set true in v2\n simpleKeys: false,\n version: '1.2'\n};\nconst scalarOptions = {\n get binary() {\n return resolveSeq.binaryOptions;\n },\n\n set binary(opt) {\n Object.assign(resolveSeq.binaryOptions, opt);\n },\n\n get bool() {\n return resolveSeq.boolOptions;\n },\n\n set bool(opt) {\n Object.assign(resolveSeq.boolOptions, opt);\n },\n\n get int() {\n return resolveSeq.intOptions;\n },\n\n set int(opt) {\n Object.assign(resolveSeq.intOptions, opt);\n },\n\n get null() {\n return resolveSeq.nullOptions;\n },\n\n set null(opt) {\n Object.assign(resolveSeq.nullOptions, opt);\n },\n\n get str() {\n return resolveSeq.strOptions;\n },\n\n set str(opt) {\n Object.assign(resolveSeq.strOptions, opt);\n }\n\n};\nconst documentOptions = {\n '1.0': {\n schema: 'yaml-1.1',\n merge: true,\n tagPrefixes: [{\n handle: '!',\n prefix: PlainValue.defaultTagPrefix\n }, {\n handle: '!!',\n prefix: 'tag:private.yaml.org,2002:'\n }]\n },\n '1.1': {\n schema: 'yaml-1.1',\n merge: true,\n tagPrefixes: [{\n handle: '!',\n prefix: '!'\n }, {\n handle: '!!',\n prefix: PlainValue.defaultTagPrefix\n }]\n },\n '1.2': {\n schema: 'core',\n merge: false,\n tagPrefixes: [{\n handle: '!',\n prefix: '!'\n }, {\n handle: '!!',\n prefix: PlainValue.defaultTagPrefix\n }]\n }\n};\n\nfunction stringifyTag(doc, tag) {\n if ((doc.version || doc.options.version) === '1.0') {\n const priv = tag.match(/^tag:private\\.yaml\\.org,2002:([^:/]+)$/);\n if (priv) return '!' + priv[1];\n const vocab = tag.match(/^tag:([a-zA-Z0-9-]+)\\.yaml\\.org,2002:(.*)/);\n return vocab ? `!${vocab[1]}/${vocab[2]}` : `!${tag.replace(/^tag:/, '')}`;\n }\n\n let p = doc.tagPrefixes.find(p => tag.indexOf(p.prefix) === 0);\n\n if (!p) {\n const dtp = doc.getDefaults().tagPrefixes;\n p = dtp && dtp.find(p => tag.indexOf(p.prefix) === 0);\n }\n\n if (!p) return tag[0] === '!' ? 
tag : `!<${tag}>`;\n const suffix = tag.substr(p.prefix.length).replace(/[!,[\\]{}]/g, ch => ({\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n })[ch]);\n return p.handle + suffix;\n}\n\nfunction getTagObject(tags, item) {\n if (item instanceof resolveSeq.Alias) return resolveSeq.Alias;\n\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0) return match.find(t => t.format === item.format) || match[0];\n }\n\n let tagObj, obj;\n\n if (item instanceof resolveSeq.Scalar) {\n obj = item.value; // TODO: deprecate/remove class check\n\n const match = tags.filter(t => t.identify && t.identify(obj) || t.class && obj instanceof t.class);\n tagObj = match.find(t => t.format === item.format) || match.find(t => !t.format);\n } else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n\n if (!tagObj) {\n const name = obj && obj.constructor ? obj.constructor.name : typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n\n return tagObj;\n} // needs to be called before value stringifier to allow for circular anchor refs\n\n\nfunction stringifyProps(node, tagObj, {\n anchors,\n doc\n}) {\n const props = [];\n const anchor = doc.anchors.getName(node);\n\n if (anchor) {\n anchors[anchor] = node;\n props.push(`&${anchor}`);\n }\n\n if (node.tag) {\n props.push(stringifyTag(doc, node.tag));\n } else if (!tagObj.default) {\n props.push(stringifyTag(doc, tagObj.tag));\n }\n\n return props.join(' ');\n}\n\nfunction stringify(item, ctx, onComment, onChompKeep) {\n const {\n anchors,\n schema\n } = ctx.doc;\n let tagObj;\n\n if (!(item instanceof resolveSeq.Node)) {\n const createCtx = {\n aliasNodes: [],\n onTagObj: o => tagObj = o,\n prevObjects: new Map()\n };\n item = schema.createNode(item, true, null, createCtx);\n\n for (const alias of createCtx.aliasNodes) {\n alias.source = alias.source.node;\n let name = anchors.getName(alias.source);\n\n if (!name) {\n name = anchors.newName();\n anchors.map[name] = alias.source;\n }\n }\n }\n\n if (item instanceof resolveSeq.Pair) return item.toString(ctx, onComment, onChompKeep);\n if (!tagObj) tagObj = getTagObject(schema.tags, item);\n const props = stringifyProps(item, tagObj, ctx);\n if (props.length > 0) ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function' ? tagObj.stringify(item, ctx, onComment, onChompKeep) : item instanceof resolveSeq.Scalar ? resolveSeq.stringifyString(item, ctx, onComment, onChompKeep) : item.toString(ctx, onComment, onChompKeep);\n if (!props) return str;\n return item instanceof resolveSeq.Scalar || str[0] === '{' || str[0] === '[' ? 
`${props} ${str}` : `${props}\\n${ctx.indent}${str}`;\n}\n\nclass Anchors {\n static validAnchorNode(node) {\n return node instanceof resolveSeq.Scalar || node instanceof resolveSeq.YAMLSeq || node instanceof resolveSeq.YAMLMap;\n }\n\n constructor(prefix) {\n PlainValue._defineProperty(this, \"map\", {});\n\n this.prefix = prefix;\n }\n\n createAlias(node, name) {\n this.setAnchor(node, name);\n return new resolveSeq.Alias(node);\n }\n\n createMergePair(...sources) {\n const merge = new resolveSeq.Merge();\n merge.value.items = sources.map(s => {\n if (s instanceof resolveSeq.Alias) {\n if (s.source instanceof resolveSeq.YAMLMap) return s;\n } else if (s instanceof resolveSeq.YAMLMap) {\n return this.createAlias(s);\n }\n\n throw new Error('Merge sources must be Map nodes or their Aliases');\n });\n return merge;\n }\n\n getName(node) {\n const {\n map\n } = this;\n return Object.keys(map).find(a => map[a] === node);\n }\n\n getNames() {\n return Object.keys(this.map);\n }\n\n getNode(name) {\n return this.map[name];\n }\n\n newName(prefix) {\n if (!prefix) prefix = this.prefix;\n const names = Object.keys(this.map);\n\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!names.includes(name)) return name;\n }\n } // During parsing, map & aliases contain CST nodes\n\n\n resolveNodes() {\n const {\n map,\n _cstAliases\n } = this;\n Object.keys(map).forEach(a => {\n map[a] = map[a].resolved;\n });\n\n _cstAliases.forEach(a => {\n a.source = a.source.resolved;\n });\n\n delete this._cstAliases;\n }\n\n setAnchor(node, name) {\n if (node != null && !Anchors.validAnchorNode(node)) {\n throw new Error('Anchors may only be set for Scalar, Seq and Map nodes');\n }\n\n if (name && /[\\x00-\\x19\\s,[\\]{}]/.test(name)) {\n throw new Error('Anchor names must not contain whitespace or control characters');\n }\n\n const {\n map\n } = this;\n const prev = node && Object.keys(map).find(a => map[a] === node);\n\n if (prev) {\n if (!name) {\n return prev;\n } else if (prev !== name) {\n delete map[prev];\n map[name] = node;\n }\n } else {\n if (!name) {\n if (!node) return null;\n name = this.newName();\n }\n\n map[name] = node;\n }\n\n return name;\n }\n\n}\n\nconst visit = (node, tags) => {\n if (node && typeof node === 'object') {\n const {\n tag\n } = node;\n\n if (node instanceof resolveSeq.Collection) {\n if (tag) tags[tag] = true;\n node.items.forEach(n => visit(n, tags));\n } else if (node instanceof resolveSeq.Pair) {\n visit(node.key, tags);\n visit(node.value, tags);\n } else if (node instanceof resolveSeq.Scalar) {\n if (tag) tags[tag] = true;\n }\n }\n\n return tags;\n};\n\nconst listTagNames = node => Object.keys(visit(node, {}));\n\nfunction parseContents(doc, contents) {\n const comments = {\n before: [],\n after: []\n };\n let body = undefined;\n let spaceBefore = false;\n\n for (const node of contents) {\n if (node.valueRange) {\n if (body !== undefined) {\n const msg = 'Document contains trailing content not separated by a ... or --- line';\n doc.errors.push(new PlainValue.YAMLSyntaxError(node, msg));\n break;\n }\n\n const res = resolveSeq.resolveNode(doc, node);\n\n if (spaceBefore) {\n res.spaceBefore = true;\n spaceBefore = false;\n }\n\n body = res;\n } else if (node.comment !== null) {\n const cc = body === undefined ? 
comments.before : comments.after;\n cc.push(node.comment);\n } else if (node.type === PlainValue.Type.BLANK_LINE) {\n spaceBefore = true;\n\n if (body === undefined && comments.before.length > 0 && !doc.commentBefore) {\n // space-separated comments at start are parsed as document comments\n doc.commentBefore = comments.before.join('\\n');\n comments.before = [];\n }\n }\n }\n\n doc.contents = body || null;\n\n if (!body) {\n doc.comment = comments.before.concat(comments.after).join('\\n') || null;\n } else {\n const cb = comments.before.join('\\n');\n\n if (cb) {\n const cbNode = body instanceof resolveSeq.Collection && body.items[0] ? body.items[0] : body;\n cbNode.commentBefore = cbNode.commentBefore ? `${cb}\\n${cbNode.commentBefore}` : cb;\n }\n\n doc.comment = comments.after.join('\\n') || null;\n }\n}\n\nfunction resolveTagDirective({\n tagPrefixes\n}, directive) {\n const [handle, prefix] = directive.parameters;\n\n if (!handle || !prefix) {\n const msg = 'Insufficient parameters given for %TAG directive';\n throw new PlainValue.YAMLSemanticError(directive, msg);\n }\n\n if (tagPrefixes.some(p => p.handle === handle)) {\n const msg = 'The %TAG directive must only be given at most once per handle in the same document.';\n throw new PlainValue.YAMLSemanticError(directive, msg);\n }\n\n return {\n handle,\n prefix\n };\n}\n\nfunction resolveYamlDirective(doc, directive) {\n let [version] = directive.parameters;\n if (directive.name === 'YAML:1.0') version = '1.0';\n\n if (!version) {\n const msg = 'Insufficient parameters given for %YAML directive';\n throw new PlainValue.YAMLSemanticError(directive, msg);\n }\n\n if (!documentOptions[version]) {\n const v0 = doc.version || doc.options.version;\n const msg = `Document will be parsed as YAML ${v0} rather than YAML ${version}`;\n doc.warnings.push(new PlainValue.YAMLWarning(directive, msg));\n }\n\n return version;\n}\n\nfunction parseDirectives(doc, directives, prevDoc) {\n const directiveComments = [];\n let hasDirectives = false;\n\n for (const directive of directives) {\n const {\n comment,\n name\n } = directive;\n\n switch (name) {\n case 'TAG':\n try {\n doc.tagPrefixes.push(resolveTagDirective(doc, directive));\n } catch (error) {\n doc.errors.push(error);\n }\n\n hasDirectives = true;\n break;\n\n case 'YAML':\n case 'YAML:1.0':\n if (doc.version) {\n const msg = 'The %YAML directive must only be given at most once per document.';\n doc.errors.push(new PlainValue.YAMLSemanticError(directive, msg));\n }\n\n try {\n doc.version = resolveYamlDirective(doc, directive);\n } catch (error) {\n doc.errors.push(error);\n }\n\n hasDirectives = true;\n break;\n\n default:\n if (name) {\n const msg = `YAML only supports %TAG and %YAML directives, and not %${name}`;\n doc.warnings.push(new PlainValue.YAMLWarning(directive, msg));\n }\n\n }\n\n if (comment) directiveComments.push(comment);\n }\n\n if (prevDoc && !hasDirectives && '1.1' === (doc.version || prevDoc.version || doc.options.version)) {\n const copyTagPrefix = ({\n handle,\n prefix\n }) => ({\n handle,\n prefix\n });\n\n doc.tagPrefixes = prevDoc.tagPrefixes.map(copyTagPrefix);\n doc.version = prevDoc.version;\n }\n\n doc.commentBefore = directiveComments.join('\\n') || null;\n}\n\nfunction assertCollection(contents) {\n if (contents instanceof resolveSeq.Collection) return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nclass Document {\n constructor(options) {\n this.anchors = new Anchors(options.anchorPrefix);\n this.commentBefore = null;\n 
this.comment = null;\n this.contents = null;\n this.directivesEndMarker = null;\n this.errors = [];\n this.options = options;\n this.schema = null;\n this.tagPrefixes = [];\n this.version = null;\n this.warnings = [];\n }\n\n add(value) {\n assertCollection(this.contents);\n return this.contents.add(value);\n }\n\n addIn(path, value) {\n assertCollection(this.contents);\n this.contents.addIn(path, value);\n }\n\n delete(key) {\n assertCollection(this.contents);\n return this.contents.delete(key);\n }\n\n deleteIn(path) {\n if (resolveSeq.isEmptyPath(path)) {\n if (this.contents == null) return false;\n this.contents = null;\n return true;\n }\n\n assertCollection(this.contents);\n return this.contents.deleteIn(path);\n }\n\n getDefaults() {\n return Document.defaults[this.version] || Document.defaults[this.options.version] || {};\n }\n\n get(key, keepScalar) {\n return this.contents instanceof resolveSeq.Collection ? this.contents.get(key, keepScalar) : undefined;\n }\n\n getIn(path, keepScalar) {\n if (resolveSeq.isEmptyPath(path)) return !keepScalar && this.contents instanceof resolveSeq.Scalar ? this.contents.value : this.contents;\n return this.contents instanceof resolveSeq.Collection ? this.contents.getIn(path, keepScalar) : undefined;\n }\n\n has(key) {\n return this.contents instanceof resolveSeq.Collection ? this.contents.has(key) : false;\n }\n\n hasIn(path) {\n if (resolveSeq.isEmptyPath(path)) return this.contents !== undefined;\n return this.contents instanceof resolveSeq.Collection ? this.contents.hasIn(path) : false;\n }\n\n set(key, value) {\n assertCollection(this.contents);\n this.contents.set(key, value);\n }\n\n setIn(path, value) {\n if (resolveSeq.isEmptyPath(path)) this.contents = value;else {\n assertCollection(this.contents);\n this.contents.setIn(path, value);\n }\n }\n\n setSchema(id, customTags) {\n if (!id && !customTags && this.schema) return;\n if (typeof id === 'number') id = id.toFixed(1);\n\n if (id === '1.0' || id === '1.1' || id === '1.2') {\n if (this.version) this.version = id;else this.options.version = id;\n delete this.options.schema;\n } else if (id && typeof id === 'string') {\n this.options.schema = id;\n }\n\n if (Array.isArray(customTags)) this.options.customTags = customTags;\n const opt = Object.assign({}, this.getDefaults(), this.options);\n this.schema = new Schema.Schema(opt);\n }\n\n parse(node, prevDoc) {\n if (this.options.keepCstNodes) this.cstNode = node;\n if (this.options.keepNodeTypes) this.type = 'DOCUMENT';\n const {\n directives = [],\n contents = [],\n directivesEndMarker,\n error,\n valueRange\n } = node;\n\n if (error) {\n if (!error.source) error.source = this;\n this.errors.push(error);\n }\n\n parseDirectives(this, directives, prevDoc);\n if (directivesEndMarker) this.directivesEndMarker = true;\n this.range = valueRange ? [valueRange.start, valueRange.end] : null;\n this.setSchema();\n this.anchors._cstAliases = [];\n parseContents(this, contents);\n this.anchors.resolveNodes();\n\n if (this.options.prettyErrors) {\n for (const error of this.errors) if (error instanceof PlainValue.YAMLError) error.makePretty();\n\n for (const warn of this.warnings) if (warn instanceof PlainValue.YAMLError) warn.makePretty();\n }\n\n return this;\n }\n\n listNonDefaultTags() {\n return listTagNames(this.contents).filter(t => t.indexOf(Schema.Schema.defaultPrefix) !== 0);\n }\n\n setTagPrefix(handle, prefix) {\n if (handle[0] !== '!' 
|| handle[handle.length - 1] !== '!') throw new Error('Handle must start and end with !');\n\n if (prefix) {\n const prev = this.tagPrefixes.find(p => p.handle === handle);\n if (prev) prev.prefix = prefix;else this.tagPrefixes.push({\n handle,\n prefix\n });\n } else {\n this.tagPrefixes = this.tagPrefixes.filter(p => p.handle !== handle);\n }\n }\n\n toJSON(arg, onAnchor) {\n const {\n keepBlobsInJSON,\n mapAsMap,\n maxAliasCount\n } = this.options;\n const keep = keepBlobsInJSON && (typeof arg !== 'string' || !(this.contents instanceof resolveSeq.Scalar));\n const ctx = {\n doc: this,\n indentStep: ' ',\n keep,\n mapAsMap: keep && !!mapAsMap,\n maxAliasCount,\n stringify // Requiring directly in Pair would create circular dependencies\n\n };\n const anchorNames = Object.keys(this.anchors.map);\n if (anchorNames.length > 0) ctx.anchors = new Map(anchorNames.map(name => [this.anchors.map[name], {\n alias: [],\n aliasCount: 0,\n count: 1\n }]));\n const res = resolveSeq.toJSON(this.contents, arg, ctx);\n if (typeof onAnchor === 'function' && ctx.anchors) for (const {\n count,\n res\n } of ctx.anchors.values()) onAnchor(res, count);\n return res;\n }\n\n toString() {\n if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified');\n const indentSize = this.options.indent;\n\n if (!Number.isInteger(indentSize) || indentSize <= 0) {\n const s = JSON.stringify(indentSize);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n\n this.setSchema();\n const lines = [];\n let hasDirectives = false;\n\n if (this.version) {\n let vd = '%YAML 1.2';\n\n if (this.schema.name === 'yaml-1.1') {\n if (this.version === '1.0') vd = '%YAML:1.0';else if (this.version === '1.1') vd = '%YAML 1.1';\n }\n\n lines.push(vd);\n hasDirectives = true;\n }\n\n const tagNames = this.listNonDefaultTags();\n this.tagPrefixes.forEach(({\n handle,\n prefix\n }) => {\n if (tagNames.some(t => t.indexOf(prefix) === 0)) {\n lines.push(`%TAG ${handle} ${prefix}`);\n hasDirectives = true;\n }\n });\n if (hasDirectives || this.directivesEndMarker) lines.push('---');\n\n if (this.commentBefore) {\n if (hasDirectives || !this.directivesEndMarker) lines.unshift('');\n lines.unshift(this.commentBefore.replace(/^/gm, '#'));\n }\n\n const ctx = {\n anchors: {},\n doc: this,\n indent: '',\n indentStep: ' '.repeat(indentSize),\n stringify // Requiring directly in nodes would create circular dependencies\n\n };\n let chompKeep = false;\n let contentComment = null;\n\n if (this.contents) {\n if (this.contents instanceof resolveSeq.Node) {\n if (this.contents.spaceBefore && (hasDirectives || this.directivesEndMarker)) lines.push('');\n if (this.contents.commentBefore) lines.push(this.contents.commentBefore.replace(/^/gm, '#')); // top-level block scalars need to be indented if followed by a comment\n\n ctx.forceBlockIndent = !!this.comment;\n contentComment = this.contents.comment;\n }\n\n const onChompKeep = contentComment ? 
null : () => chompKeep = true;\n const body = stringify(this.contents, ctx, () => contentComment = null, onChompKeep);\n lines.push(resolveSeq.addComment(body, '', contentComment));\n } else if (this.contents !== undefined) {\n lines.push(stringify(this.contents, ctx));\n }\n\n if (this.comment) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') lines.push('');\n lines.push(this.comment.replace(/^/gm, '#'));\n }\n\n return lines.join('\\n') + '\\n';\n }\n\n}\n\nPlainValue._defineProperty(Document, \"defaults\", documentOptions);\n\nexports.Document = Document;\nexports.defaultOptions = defaultOptions;\nexports.scalarOptions = scalarOptions;\n","'use strict';\n\nconst Char = {\n ANCHOR: '&',\n COMMENT: '#',\n TAG: '!',\n DIRECTIVES_END: '-',\n DOCUMENT_END: '.'\n};\nconst Type = {\n ALIAS: 'ALIAS',\n BLANK_LINE: 'BLANK_LINE',\n BLOCK_FOLDED: 'BLOCK_FOLDED',\n BLOCK_LITERAL: 'BLOCK_LITERAL',\n COMMENT: 'COMMENT',\n DIRECTIVE: 'DIRECTIVE',\n DOCUMENT: 'DOCUMENT',\n FLOW_MAP: 'FLOW_MAP',\n FLOW_SEQ: 'FLOW_SEQ',\n MAP: 'MAP',\n MAP_KEY: 'MAP_KEY',\n MAP_VALUE: 'MAP_VALUE',\n PLAIN: 'PLAIN',\n QUOTE_DOUBLE: 'QUOTE_DOUBLE',\n QUOTE_SINGLE: 'QUOTE_SINGLE',\n SEQ: 'SEQ',\n SEQ_ITEM: 'SEQ_ITEM'\n};\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nconst defaultTags = {\n MAP: 'tag:yaml.org,2002:map',\n SEQ: 'tag:yaml.org,2002:seq',\n STR: 'tag:yaml.org,2002:str'\n};\n\nfunction findLineStarts(src) {\n const ls = [0];\n let offset = src.indexOf('\\n');\n\n while (offset !== -1) {\n offset += 1;\n ls.push(offset);\n offset = src.indexOf('\\n', offset);\n }\n\n return ls;\n}\n\nfunction getSrcInfo(cst) {\n let lineStarts, src;\n\n if (typeof cst === 'string') {\n lineStarts = findLineStarts(cst);\n src = cst;\n } else {\n if (Array.isArray(cst)) cst = cst[0];\n\n if (cst && cst.context) {\n if (!cst.lineStarts) cst.lineStarts = findLineStarts(cst.context.src);\n lineStarts = cst.lineStarts;\n src = cst.context.src;\n }\n }\n\n return {\n lineStarts,\n src\n };\n}\n/**\n * @typedef {Object} LinePos - One-indexed position in the source\n * @property {number} line\n * @property {number} col\n */\n\n/**\n * Determine the line/col position matching a character offset.\n *\n * Accepts a source string or a CST document as the second parameter. With\n * the latter, starting indices for lines are cached in the document as\n * `lineStarts: number[]`.\n *\n * Returns a one-indexed `{ line, col }` location if found, or\n * `undefined` otherwise.\n *\n * @param {number} offset\n * @param {string|Document|Document[]} cst\n * @returns {?LinePos}\n */\n\n\nfunction getLinePos(offset, cst) {\n if (typeof offset !== 'number' || offset < 0) return null;\n const {\n lineStarts,\n src\n } = getSrcInfo(cst);\n if (!lineStarts || !src || offset > src.length) return null;\n\n for (let i = 0; i < lineStarts.length; ++i) {\n const start = lineStarts[i];\n\n if (offset < start) {\n return {\n line: i,\n col: offset - lineStarts[i - 1] + 1\n };\n }\n\n if (offset === start) return {\n line: i + 1,\n col: 1\n };\n }\n\n const line = lineStarts.length;\n return {\n line,\n col: offset - lineStarts[line - 1] + 1\n };\n}\n/**\n * Get a specified line from the source.\n *\n * Accepts a source string or a CST document as the second parameter. 
With\n * the latter, starting indices for lines are cached in the document as\n * `lineStarts: number[]`.\n *\n * Returns the line as a string if found, or `null` otherwise.\n *\n * @param {number} line One-indexed line number\n * @param {string|Document|Document[]} cst\n * @returns {?string}\n */\n\nfunction getLine(line, cst) {\n const {\n lineStarts,\n src\n } = getSrcInfo(cst);\n if (!lineStarts || !(line >= 1) || line > lineStarts.length) return null;\n const start = lineStarts[line - 1];\n let end = lineStarts[line]; // undefined for last line; that's ok for slice()\n\n while (end && end > start && src[end - 1] === '\\n') --end;\n\n return src.slice(start, end);\n}\n/**\n * Pretty-print the starting line from the source indicated by the range `pos`\n *\n * Trims output to `maxWidth` chars while keeping the starting column visible,\n * using `…` at either end to indicate dropped characters.\n *\n * Returns a two-line string (or `null`) with `\\n` as separator; the second line\n * will hold appropriately indented `^` marks indicating the column range.\n *\n * @param {Object} pos\n * @param {LinePos} pos.start\n * @param {LinePos} [pos.end]\n * @param {string|Document|Document[]*} cst\n * @param {number} [maxWidth=80]\n * @returns {?string}\n */\n\nfunction getPrettyContext({\n start,\n end\n}, cst, maxWidth = 80) {\n let src = getLine(start.line, cst);\n if (!src) return null;\n let {\n col\n } = start;\n\n if (src.length > maxWidth) {\n if (col <= maxWidth - 10) {\n src = src.substr(0, maxWidth - 1) + '…';\n } else {\n const halfWidth = Math.round(maxWidth / 2);\n if (src.length > col + halfWidth) src = src.substr(0, col + halfWidth - 1) + '…';\n col -= src.length - maxWidth;\n src = '…' + src.substr(1 - maxWidth);\n }\n }\n\n let errLen = 1;\n let errEnd = '';\n\n if (end) {\n if (end.line === start.line && col + (end.col - start.col) <= maxWidth + 1) {\n errLen = end.col - start.col;\n } else {\n errLen = Math.min(src.length + 1, maxWidth) - col;\n errEnd = '…';\n }\n }\n\n const offset = col > 1 ? ' '.repeat(col - 1) : '';\n const err = '^'.repeat(errLen);\n return `${src}\\n${offset}${err}${errEnd}`;\n}\n\nclass Range {\n static copy(orig) {\n return new Range(orig.start, orig.end);\n }\n\n constructor(start, end) {\n this.start = start;\n this.end = end || start;\n }\n\n isEmpty() {\n return typeof this.start !== 'number' || !this.end || this.end <= this.start;\n }\n /**\n * Set `origStart` and `origEnd` to point to the original source range for\n * this node, which may differ due to dropped CR characters.\n *\n * @param {number[]} cr - Positions of dropped CR characters\n * @param {number} offset - Starting index of `cr` from the last call\n * @returns {number} - The next offset, matching the one found for `origStart`\n */\n\n\n setOrigRange(cr, offset) {\n const {\n start,\n end\n } = this;\n\n if (cr.length === 0 || end <= cr[0]) {\n this.origStart = start;\n this.origEnd = end;\n return offset;\n }\n\n let i = offset;\n\n while (i < cr.length) {\n if (cr[i] > start) break;else ++i;\n }\n\n this.origStart = start + i;\n const nextOffset = i;\n\n while (i < cr.length) {\n // if end was at \\n, it should now be at \\r\n if (cr[i] >= end) break;else ++i;\n }\n\n this.origEnd = end + i;\n return nextOffset;\n }\n\n}\n\n/** Root class of all nodes */\n\nclass Node {\n static addStringTerminator(src, offset, str) {\n if (str[str.length - 1] === '\\n') return str;\n const next = Node.endOfWhiteSpace(src, offset);\n return next >= src.length || src[next] === '\\n' ? 
str + '\\n' : str;\n } // ^(---|...)\n\n\n static atDocumentBoundary(src, offset, sep) {\n const ch0 = src[offset];\n if (!ch0) return true;\n const prev = src[offset - 1];\n if (prev && prev !== '\\n') return false;\n\n if (sep) {\n if (ch0 !== sep) return false;\n } else {\n if (ch0 !== Char.DIRECTIVES_END && ch0 !== Char.DOCUMENT_END) return false;\n }\n\n const ch1 = src[offset + 1];\n const ch2 = src[offset + 2];\n if (ch1 !== ch0 || ch2 !== ch0) return false;\n const ch3 = src[offset + 3];\n return !ch3 || ch3 === '\\n' || ch3 === '\\t' || ch3 === ' ';\n }\n\n static endOfIdentifier(src, offset) {\n let ch = src[offset];\n const isVerbatim = ch === '<';\n const notOk = isVerbatim ? ['\\n', '\\t', ' ', '>'] : ['\\n', '\\t', ' ', '[', ']', '{', '}', ','];\n\n while (ch && notOk.indexOf(ch) === -1) ch = src[offset += 1];\n\n if (isVerbatim && ch === '>') offset += 1;\n return offset;\n }\n\n static endOfIndent(src, offset) {\n let ch = src[offset];\n\n while (ch === ' ') ch = src[offset += 1];\n\n return offset;\n }\n\n static endOfLine(src, offset) {\n let ch = src[offset];\n\n while (ch && ch !== '\\n') ch = src[offset += 1];\n\n return offset;\n }\n\n static endOfWhiteSpace(src, offset) {\n let ch = src[offset];\n\n while (ch === '\\t' || ch === ' ') ch = src[offset += 1];\n\n return offset;\n }\n\n static startOfLine(src, offset) {\n let ch = src[offset - 1];\n if (ch === '\\n') return offset;\n\n while (ch && ch !== '\\n') ch = src[offset -= 1];\n\n return offset + 1;\n }\n /**\n * End of indentation, or null if the line's indent level is not more\n * than `indent`\n *\n * @param {string} src\n * @param {number} indent\n * @param {number} lineStart\n * @returns {?number}\n */\n\n\n static endOfBlockIndent(src, indent, lineStart) {\n const inEnd = Node.endOfIndent(src, lineStart);\n\n if (inEnd > lineStart + indent) {\n return inEnd;\n } else {\n const wsEnd = Node.endOfWhiteSpace(src, inEnd);\n const ch = src[wsEnd];\n if (!ch || ch === '\\n') return wsEnd;\n }\n\n return null;\n }\n\n static atBlank(src, offset, endAsBlank) {\n const ch = src[offset];\n return ch === '\\n' || ch === '\\t' || ch === ' ' || endAsBlank && !ch;\n }\n\n static nextNodeIsIndented(ch, indentDiff, indicatorAsIndent) {\n if (!ch || indentDiff < 0) return false;\n if (indentDiff > 0) return true;\n return indicatorAsIndent && ch === '-';\n } // should be at line or string end, or at next non-whitespace char\n\n\n static normalizeOffset(src, offset) {\n const ch = src[offset];\n return !ch ? offset : ch !== '\\n' && src[offset - 1] === '\\n' ? 
offset - 1 : Node.endOfWhiteSpace(src, offset);\n } // fold single newline into space, multiple newlines to N - 1 newlines\n // presumes src[offset] === '\\n'\n\n\n static foldNewline(src, offset, indent) {\n let inCount = 0;\n let error = false;\n let fold = '';\n let ch = src[offset + 1];\n\n while (ch === ' ' || ch === '\\t' || ch === '\\n') {\n switch (ch) {\n case '\\n':\n inCount = 0;\n offset += 1;\n fold += '\\n';\n break;\n\n case '\\t':\n if (inCount <= indent) error = true;\n offset = Node.endOfWhiteSpace(src, offset + 2) - 1;\n break;\n\n case ' ':\n inCount += 1;\n offset += 1;\n break;\n }\n\n ch = src[offset + 1];\n }\n\n if (!fold) fold = ' ';\n if (ch && inCount <= indent) error = true;\n return {\n fold,\n offset,\n error\n };\n }\n\n constructor(type, props, context) {\n Object.defineProperty(this, 'context', {\n value: context || null,\n writable: true\n });\n this.error = null;\n this.range = null;\n this.valueRange = null;\n this.props = props || [];\n this.type = type;\n this.value = null;\n }\n\n getPropValue(idx, key, skipKey) {\n if (!this.context) return null;\n const {\n src\n } = this.context;\n const prop = this.props[idx];\n return prop && src[prop.start] === key ? src.slice(prop.start + (skipKey ? 1 : 0), prop.end) : null;\n }\n\n get anchor() {\n for (let i = 0; i < this.props.length; ++i) {\n const anchor = this.getPropValue(i, Char.ANCHOR, true);\n if (anchor != null) return anchor;\n }\n\n return null;\n }\n\n get comment() {\n const comments = [];\n\n for (let i = 0; i < this.props.length; ++i) {\n const comment = this.getPropValue(i, Char.COMMENT, true);\n if (comment != null) comments.push(comment);\n }\n\n return comments.length > 0 ? comments.join('\\n') : null;\n }\n\n commentHasRequiredWhitespace(start) {\n const {\n src\n } = this.context;\n if (this.header && start === this.header.end) return false;\n if (!this.valueRange) return false;\n const {\n end\n } = this.valueRange;\n return start !== end || Node.atBlank(src, end - 1);\n }\n\n get hasComment() {\n if (this.context) {\n const {\n src\n } = this.context;\n\n for (let i = 0; i < this.props.length; ++i) {\n if (src[this.props[i].start] === Char.COMMENT) return true;\n }\n }\n\n return false;\n }\n\n get hasProps() {\n if (this.context) {\n const {\n src\n } = this.context;\n\n for (let i = 0; i < this.props.length; ++i) {\n if (src[this.props[i].start] !== Char.COMMENT) return true;\n }\n }\n\n return false;\n }\n\n get includesTrailingLines() {\n return false;\n }\n\n get jsonLike() {\n const jsonLikeTypes = [Type.FLOW_MAP, Type.FLOW_SEQ, Type.QUOTE_DOUBLE, Type.QUOTE_SINGLE];\n return jsonLikeTypes.indexOf(this.type) !== -1;\n }\n\n get rangeAsLinePos() {\n if (!this.range || !this.context) return undefined;\n const start = getLinePos(this.range.start, this.context.root);\n if (!start) return undefined;\n const end = getLinePos(this.range.end, this.context.root);\n return {\n start,\n end\n };\n }\n\n get rawValue() {\n if (!this.valueRange || !this.context) return null;\n const {\n start,\n end\n } = this.valueRange;\n return this.context.src.slice(start, end);\n }\n\n get tag() {\n for (let i = 0; i < this.props.length; ++i) {\n const tag = this.getPropValue(i, Char.TAG, false);\n\n if (tag != null) {\n if (tag[1] === '<') {\n return {\n verbatim: tag.slice(2, -1)\n };\n } else {\n // eslint-disable-next-line no-unused-vars\n const [_, handle, suffix] = tag.match(/^(.*!)([^!]*)$/);\n return {\n handle,\n suffix\n };\n }\n }\n }\n\n return null;\n }\n\n get valueRangeContainsNewline() 
{\n if (!this.valueRange || !this.context) return false;\n const {\n start,\n end\n } = this.valueRange;\n const {\n src\n } = this.context;\n\n for (let i = start; i < end; ++i) {\n if (src[i] === '\\n') return true;\n }\n\n return false;\n }\n\n parseComment(start) {\n const {\n src\n } = this.context;\n\n if (src[start] === Char.COMMENT) {\n const end = Node.endOfLine(src, start + 1);\n const commentRange = new Range(start, end);\n this.props.push(commentRange);\n return end;\n }\n\n return start;\n }\n /**\n * Populates the `origStart` and `origEnd` values of all ranges for this\n * node. Extended by child classes to handle descendant nodes.\n *\n * @param {number[]} cr - Positions of dropped CR characters\n * @param {number} offset - Starting index of `cr` from the last call\n * @returns {number} - The next offset, matching the one found for `origStart`\n */\n\n\n setOrigRanges(cr, offset) {\n if (this.range) offset = this.range.setOrigRange(cr, offset);\n if (this.valueRange) this.valueRange.setOrigRange(cr, offset);\n this.props.forEach(prop => prop.setOrigRange(cr, offset));\n return offset;\n }\n\n toString() {\n const {\n context: {\n src\n },\n range,\n value\n } = this;\n if (value != null) return value;\n const str = src.slice(range.start, range.end);\n return Node.addStringTerminator(src, range.end, str);\n }\n\n}\n\nclass YAMLError extends Error {\n constructor(name, source, message) {\n if (!message || !(source instanceof Node)) throw new Error(`Invalid arguments for new ${name}`);\n super();\n this.name = name;\n this.message = message;\n this.source = source;\n }\n\n makePretty() {\n if (!this.source) return;\n this.nodeType = this.source.type;\n const cst = this.source.context && this.source.context.root;\n\n if (typeof this.offset === 'number') {\n this.range = new Range(this.offset, this.offset + 1);\n const start = cst && getLinePos(this.offset, cst);\n\n if (start) {\n const end = {\n line: start.line,\n col: start.col + 1\n };\n this.linePos = {\n start,\n end\n };\n }\n\n delete this.offset;\n } else {\n this.range = this.source.range;\n this.linePos = this.source.rangeAsLinePos;\n }\n\n if (this.linePos) {\n const {\n line,\n col\n } = this.linePos.start;\n this.message += ` at line ${line}, column ${col}`;\n const ctx = cst && getPrettyContext(this.linePos, cst);\n if (ctx) this.message += `:\\n\\n${ctx}\\n`;\n }\n\n delete this.source;\n }\n\n}\nclass YAMLReferenceError extends YAMLError {\n constructor(source, message) {\n super('YAMLReferenceError', source, message);\n }\n\n}\nclass YAMLSemanticError extends YAMLError {\n constructor(source, message) {\n super('YAMLSemanticError', source, message);\n }\n\n}\nclass YAMLSyntaxError extends YAMLError {\n constructor(source, message) {\n super('YAMLSyntaxError', source, message);\n }\n\n}\nclass YAMLWarning extends YAMLError {\n constructor(source, message) {\n super('YAMLWarning', source, message);\n }\n\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nclass PlainValue extends Node {\n static endOfLine(src, start, inFlow) {\n let ch = src[start];\n let offset = start;\n\n while (ch && ch !== '\\n') {\n if (inFlow && (ch === '[' || ch === ']' || ch === '{' || ch === '}' || ch === ',')) break;\n const next = src[offset + 1];\n if (ch === ':' && (!next || next === '\\n' || next === '\\t' || next === ' ' || inFlow && next === 
',')) break;\n if ((ch === ' ' || ch === '\\t') && next === '#') break;\n offset += 1;\n ch = next;\n }\n\n return offset;\n }\n\n get strValue() {\n if (!this.valueRange || !this.context) return null;\n let {\n start,\n end\n } = this.valueRange;\n const {\n src\n } = this.context;\n let ch = src[end - 1];\n\n while (start < end && (ch === '\\n' || ch === '\\t' || ch === ' ')) ch = src[--end - 1];\n\n let str = '';\n\n for (let i = start; i < end; ++i) {\n const ch = src[i];\n\n if (ch === '\\n') {\n const {\n fold,\n offset\n } = Node.foldNewline(src, i, -1);\n str += fold;\n i = offset;\n } else if (ch === ' ' || ch === '\\t') {\n // trim trailing whitespace\n const wsStart = i;\n let next = src[i + 1];\n\n while (i < end && (next === ' ' || next === '\\t')) {\n i += 1;\n next = src[i + 1];\n }\n\n if (next !== '\\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch;\n } else {\n str += ch;\n }\n }\n\n const ch0 = src[start];\n\n switch (ch0) {\n case '\\t':\n {\n const msg = 'Plain value cannot start with a tab character';\n const errors = [new YAMLSemanticError(this, msg)];\n return {\n errors,\n str\n };\n }\n\n case '@':\n case '`':\n {\n const msg = `Plain value cannot start with reserved character ${ch0}`;\n const errors = [new YAMLSemanticError(this, msg)];\n return {\n errors,\n str\n };\n }\n\n default:\n return str;\n }\n }\n\n parseBlockValue(start) {\n const {\n indent,\n inFlow,\n src\n } = this.context;\n let offset = start;\n let valueEnd = start;\n\n for (let ch = src[offset]; ch === '\\n'; ch = src[offset]) {\n if (Node.atDocumentBoundary(src, offset + 1)) break;\n const end = Node.endOfBlockIndent(src, indent, offset + 1);\n if (end === null || src[end] === '#') break;\n\n if (src[end] === '\\n') {\n offset = end;\n } else {\n valueEnd = PlainValue.endOfLine(src, end, inFlow);\n offset = valueEnd;\n }\n }\n\n if (this.valueRange.isEmpty()) this.valueRange.start = start;\n this.valueRange.end = valueEnd;\n return valueEnd;\n }\n /**\n * Parses a plain value from the source\n *\n * Accepted forms are:\n * ```\n * #comment\n *\n * first line\n *\n * first line #comment\n *\n * first line\n * block\n * lines\n *\n * #comment\n * block\n * lines\n * ```\n * where block lines are empty or have an indent level greater than `indent`.\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this scalar, may be `\\n`\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n inFlow,\n src\n } = context;\n let offset = start;\n const ch = src[offset];\n\n if (ch && ch !== '#' && ch !== '\\n') {\n offset = PlainValue.endOfLine(src, start, inFlow);\n }\n\n this.valueRange = new Range(start, offset);\n offset = Node.endOfWhiteSpace(src, offset);\n offset = this.parseComment(offset);\n\n if (!this.hasComment || this.valueRange.isEmpty()) {\n offset = this.parseBlockValue(offset);\n }\n\n return offset;\n }\n\n}\n\nexports.Char = Char;\nexports.Node = Node;\nexports.PlainValue = PlainValue;\nexports.Range = Range;\nexports.Type = Type;\nexports.YAMLError = YAMLError;\nexports.YAMLReferenceError = YAMLReferenceError;\nexports.YAMLSemanticError = YAMLSemanticError;\nexports.YAMLSyntaxError = YAMLSyntaxError;\nexports.YAMLWarning = YAMLWarning;\nexports._defineProperty = _defineProperty;\nexports.defaultTagPrefix = defaultTagPrefix;\nexports.defaultTags = defaultTags;\n","'use strict';\n\nvar PlainValue = require('./PlainValue-ec8e588e.js');\nvar resolveSeq = 
require('./resolveSeq-4a68b39b.js');\nvar warnings = require('./warnings-39684f17.js');\n\nfunction createMap(schema, obj, ctx) {\n const map = new resolveSeq.YAMLMap(schema);\n\n if (obj instanceof Map) {\n for (const [key, value] of obj) map.items.push(schema.createPair(key, value, ctx));\n } else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj)) map.items.push(schema.createPair(key, obj[key], ctx));\n }\n\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n\n return map;\n}\n\nconst map = {\n createNode: createMap,\n default: true,\n nodeClass: resolveSeq.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve: resolveSeq.resolveMap\n};\n\nfunction createSeq(schema, obj, ctx) {\n const seq = new resolveSeq.YAMLSeq(schema);\n\n if (obj && obj[Symbol.iterator]) {\n for (const it of obj) {\n const v = schema.createNode(it, ctx.wrapScalars, null, ctx);\n seq.items.push(v);\n }\n }\n\n return seq;\n}\n\nconst seq = {\n createNode: createSeq,\n default: true,\n nodeClass: resolveSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve: resolveSeq.resolveSeq\n};\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: resolveSeq.resolveString,\n\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({\n actualString: true\n }, ctx);\n return resolveSeq.stringifyString(item, ctx, onComment, onChompKeep);\n },\n\n options: resolveSeq.strOptions\n};\n\nconst failsafe = [map, seq, string];\n\n/* global BigInt */\n\nconst intIdentify = value => typeof value === 'bigint' || Number.isInteger(value);\n\nconst intResolve = (src, part, radix) => resolveSeq.intOptions.asBigInt ? BigInt(src) : parseInt(part, radix);\n\nfunction intStringify(node, radix, prefix) {\n const {\n value\n } = node;\n if (intIdentify(value) && value >= 0) return prefix + value.toString(radix);\n return resolveSeq.stringifyNumber(node);\n}\n\nconst nullObj = {\n identify: value => value == null,\n createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null,\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => null,\n options: resolveSeq.nullOptions,\n stringify: () => resolveSeq.nullOptions.nullStr\n};\nconst boolObj = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => str[0] === 't' || str[0] === 'T',\n options: resolveSeq.boolOptions,\n stringify: ({\n value\n }) => value ? 
resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr\n};\nconst octObj = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o([0-7]+)$/,\n resolve: (str, oct) => intResolve(str, oct, 8),\n options: resolveSeq.intOptions,\n stringify: node => intStringify(node, 8, '0o')\n};\nconst intObj = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: str => intResolve(str, str, 10),\n options: resolveSeq.intOptions,\n stringify: resolveSeq.stringifyNumber\n};\nconst hexObj = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x([0-9a-fA-F]+)$/,\n resolve: (str, hex) => intResolve(str, hex, 16),\n options: resolveSeq.intOptions,\n stringify: node => intStringify(node, 16, '0x')\n};\nconst nanObj = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.inf|(\\.nan))$/i,\n resolve: (str, nan) => nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY,\n stringify: resolveSeq.stringifyNumber\n};\nconst expObj = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify: ({\n value\n }) => Number(value).toExponential()\n};\nconst floatObj = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.([0-9]+)|[0-9]+\\.([0-9]*))$/,\n\n resolve(str, frac1, frac2) {\n const frac = frac1 || frac2;\n const node = new resolveSeq.Scalar(parseFloat(str));\n if (frac && frac[frac.length - 1] === '0') node.minFractionDigits = frac.length;\n return node;\n },\n\n stringify: resolveSeq.stringifyNumber\n};\nconst core = failsafe.concat([nullObj, boolObj, octObj, intObj, hexObj, nanObj, expObj, floatObj]);\n\n/* global BigInt */\n\nconst intIdentify$1 = value => typeof value === 'bigint' || Number.isInteger(value);\n\nconst stringifyJSON = ({\n value\n}) => JSON.stringify(value);\n\nconst json = [map, seq, {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: resolveSeq.resolveString,\n stringify: stringifyJSON\n}, {\n identify: value => value == null,\n createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null,\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n}, {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n}, {\n identify: intIdentify$1,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: str => resolveSeq.intOptions.asBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({\n value\n }) => intIdentify$1(value) ? 
value.toString() : JSON.stringify(value)\n}, {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n}];\n\njson.scalarFallback = str => {\n throw new SyntaxError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n};\n\n/* global BigInt */\n\nconst boolStringify = ({\n value\n}) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr;\n\nconst intIdentify$2 = value => typeof value === 'bigint' || Number.isInteger(value);\n\nfunction intResolve$1(sign, src, radix) {\n let str = src.replace(/_/g, '');\n\n if (resolveSeq.intOptions.asBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n\n case 8:\n str = `0o${str}`;\n break;\n\n case 16:\n str = `0x${str}`;\n break;\n }\n\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\n\nfunction intStringify$1(node, radix, prefix) {\n const {\n value\n } = node;\n\n if (intIdentify$2(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n\n return resolveSeq.stringifyNumber(node);\n}\n\nconst yaml11 = failsafe.concat([{\n identify: value => value == null,\n createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null,\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => null,\n options: resolveSeq.nullOptions,\n stringify: () => resolveSeq.nullOptions.nullStr\n}, {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => true,\n options: resolveSeq.boolOptions,\n stringify: boolStringify\n}, {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => false,\n options: resolveSeq.boolOptions,\n stringify: boolStringify\n}, {\n identify: intIdentify$2,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^([-+]?)0b([0-1_]+)$/,\n resolve: (str, sign, bin) => intResolve$1(sign, bin, 2),\n stringify: node => intStringify$1(node, 2, '0b')\n}, {\n identify: intIdentify$2,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^([-+]?)0([0-7_]+)$/,\n resolve: (str, sign, oct) => intResolve$1(sign, oct, 8),\n stringify: node => intStringify$1(node, 8, '0')\n}, {\n identify: intIdentify$2,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^([-+]?)([0-9][0-9_]*)$/,\n resolve: (str, sign, abs) => intResolve$1(sign, abs, 10),\n stringify: resolveSeq.stringifyNumber\n}, {\n identify: intIdentify$2,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^([-+]?)0x([0-9a-fA-F_]+)$/,\n resolve: (str, sign, hex) => intResolve$1(sign, hex, 16),\n stringify: node => intStringify$1(node, 16, '0x')\n}, {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.inf|(\\.nan))$/i,\n resolve: (str, nan) => nan ? NaN : str[0] === '-' ? 
Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY,\n stringify: resolveSeq.stringifyNumber\n}, {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?([0-9][0-9_]*)?(\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str.replace(/_/g, '')),\n stringify: ({\n value\n }) => Number(value).toExponential()\n}, {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.([0-9_]*)$/,\n\n resolve(str, frac) {\n const node = new resolveSeq.Scalar(parseFloat(str.replace(/_/g, '')));\n\n if (frac) {\n const f = frac.replace(/_/g, '');\n if (f[f.length - 1] === '0') node.minFractionDigits = f.length;\n }\n\n return node;\n },\n\n stringify: resolveSeq.stringifyNumber\n}], warnings.binary, warnings.omap, warnings.pairs, warnings.set, warnings.intTime, warnings.floatTime, warnings.timestamp);\n\nconst schemas = {\n core,\n failsafe,\n json,\n yaml11\n};\nconst tags = {\n binary: warnings.binary,\n bool: boolObj,\n float: floatObj,\n floatExp: expObj,\n floatNaN: nanObj,\n floatTime: warnings.floatTime,\n int: intObj,\n intHex: hexObj,\n intOct: octObj,\n intTime: warnings.intTime,\n map,\n null: nullObj,\n omap: warnings.omap,\n pairs: warnings.pairs,\n seq,\n set: warnings.set,\n timestamp: warnings.timestamp\n};\n\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) || match[0];\n if (!tagObj) throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n } // TODO: deprecate/remove class check\n\n\n return tags.find(t => (t.identify && t.identify(value) || t.class && value instanceof t.class) && !t.format);\n}\n\nfunction createNode(value, tagName, ctx) {\n if (value instanceof resolveSeq.Node) return value;\n const {\n defaultPrefix,\n onTagObj,\n prevObjects,\n schema,\n wrapScalars\n } = ctx;\n if (tagName && tagName.startsWith('!!')) tagName = defaultPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n\n if (!tagObj) {\n if (typeof value.toJSON === 'function') value = value.toJSON();\n if (typeof value !== 'object') return wrapScalars ? new resolveSeq.Scalar(value) : value;\n tagObj = value instanceof Map ? map : value[Symbol.iterator] ? seq : map;\n }\n\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n } // Detect duplicate references to the same object & use Alias nodes for all\n // after first. The `obj` wrapper allows for circular references to resolve.\n\n\n const obj = {};\n\n if (value && typeof value === 'object' && prevObjects) {\n const prev = prevObjects.get(value);\n\n if (prev) {\n const alias = new resolveSeq.Alias(prev); // leaves source dirty; must be cleaned by caller\n\n ctx.aliasNodes.push(alias); // defined along with prevObjects\n\n return alias;\n }\n\n obj.value = value;\n prevObjects.set(value, obj);\n }\n\n obj.node = tagObj.createNode ? tagObj.createNode(ctx.schema, value, ctx) : wrapScalars ? 
new resolveSeq.Scalar(value) : value;\n if (tagName && obj.node instanceof resolveSeq.Node) obj.node.tag = tagName;\n return obj.node;\n}\n\nfunction getSchemaTags(schemas, knownTags, customTags, schemaId) {\n let tags = schemas[schemaId.replace(/\\W/g, '')]; // 'yaml-1.1' -> 'yaml11'\n\n if (!tags) {\n const keys = Object.keys(schemas).map(key => JSON.stringify(key)).join(', ');\n throw new Error(`Unknown schema \"${schemaId}\"; use one of ${keys}`);\n }\n\n if (Array.isArray(customTags)) {\n for (const tag of customTags) tags = tags.concat(tag);\n } else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n\n for (let i = 0; i < tags.length; ++i) {\n const tag = tags[i];\n\n if (typeof tag === 'string') {\n const tagObj = knownTags[tag];\n\n if (!tagObj) {\n const keys = Object.keys(knownTags).map(key => JSON.stringify(key)).join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n }\n\n tags[i] = tagObj;\n }\n }\n\n return tags;\n}\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\n\nclass Schema {\n // TODO: remove in v2\n // TODO: remove in v2\n constructor({\n customTags,\n merge,\n schema,\n sortMapEntries,\n tags: deprecatedCustomTags\n }) {\n this.merge = !!merge;\n this.name = schema;\n this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null;\n if (!customTags && deprecatedCustomTags) warnings.warnOptionDeprecation('tags', 'customTags');\n this.tags = getSchemaTags(schemas, tags, customTags || deprecatedCustomTags, schema);\n }\n\n createNode(value, wrapScalars, tagName, ctx) {\n const baseCtx = {\n defaultPrefix: Schema.defaultPrefix,\n schema: this,\n wrapScalars\n };\n const createCtx = ctx ? Object.assign(ctx, baseCtx) : baseCtx;\n return createNode(value, tagName, createCtx);\n }\n\n createPair(key, value, ctx) {\n if (!ctx) ctx = {\n wrapScalars: true\n };\n const k = this.createNode(key, ctx.wrapScalars, null, ctx);\n const v = this.createNode(value, ctx.wrapScalars, null, ctx);\n return new resolveSeq.Pair(k, v);\n }\n\n}\n\nPlainValue._defineProperty(Schema, \"defaultPrefix\", PlainValue.defaultTagPrefix);\n\nPlainValue._defineProperty(Schema, \"defaultTags\", PlainValue.defaultTags);\n\nexports.Schema = Schema;\n","'use strict';\n\nvar PlainValue = require('./PlainValue-ec8e588e.js');\nvar parseCst = require('./parse-cst.js');\nrequire('./resolveSeq-4a68b39b.js');\nvar Document$1 = require('./Document-2cf6b08c.js');\nvar Schema = require('./Schema-42e9705c.js');\nvar warnings = require('./warnings-39684f17.js');\n\nfunction createNode(value, wrapScalars = true, tag) {\n if (tag === undefined && typeof wrapScalars === 'string') {\n tag = wrapScalars;\n wrapScalars = true;\n }\n\n const options = Object.assign({}, Document$1.Document.defaults[Document$1.defaultOptions.version], Document$1.defaultOptions);\n const schema = new Schema.Schema(options);\n return schema.createNode(value, wrapScalars, tag);\n}\n\nclass Document extends Document$1.Document {\n constructor(options) {\n super(Object.assign({}, Document$1.defaultOptions, options));\n }\n\n}\n\nfunction parseAllDocuments(src, options) {\n const stream = [];\n let prev;\n\n for (const cstDoc of parseCst.parse(src)) {\n const doc = new Document(options);\n doc.parse(cstDoc, prev);\n stream.push(doc);\n prev = doc;\n }\n\n return stream;\n}\n\nfunction parseDocument(src, options) {\n const cst = parseCst.parse(src);\n const doc = new Document(options).parse(cst[0]);\n\n if (cst.length > 1) {\n 
const errMsg = 'Source contains multiple documents; please use YAML.parseAllDocuments()';\n doc.errors.unshift(new PlainValue.YAMLSemanticError(cst[1], errMsg));\n }\n\n return doc;\n}\n\nfunction parse(src, options) {\n const doc = parseDocument(src, options);\n doc.warnings.forEach(warning => warnings.warn(warning));\n if (doc.errors.length > 0) throw doc.errors[0];\n return doc.toJSON();\n}\n\nfunction stringify(value, options) {\n const doc = new Document(options);\n doc.contents = value;\n return String(doc);\n}\n\nconst YAML = {\n createNode,\n defaultOptions: Document$1.defaultOptions,\n Document,\n parse,\n parseAllDocuments,\n parseCST: parseCst.parse,\n parseDocument,\n scalarOptions: Document$1.scalarOptions,\n stringify\n};\n\nexports.YAML = YAML;\n","'use strict';\n\nvar PlainValue = require('./PlainValue-ec8e588e.js');\n\nclass BlankLine extends PlainValue.Node {\n constructor() {\n super(PlainValue.Type.BLANK_LINE);\n }\n /* istanbul ignore next */\n\n\n get includesTrailingLines() {\n // This is never called from anywhere, but if it were,\n // this is the value it should return.\n return true;\n }\n /**\n * Parses a blank line from the source\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first \\n character\n * @returns {number} - Index of the character after this\n */\n\n\n parse(context, start) {\n this.context = context;\n this.range = new PlainValue.Range(start, start + 1);\n return start + 1;\n }\n\n}\n\nclass CollectionItem extends PlainValue.Node {\n constructor(type, props) {\n super(type, props);\n this.node = null;\n }\n\n get includesTrailingLines() {\n return !!this.node && this.node.includesTrailingLines;\n }\n /**\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n parseNode,\n src\n } = context;\n let {\n atLineStart,\n lineStart\n } = context;\n if (!atLineStart && this.type === PlainValue.Type.SEQ_ITEM) this.error = new PlainValue.YAMLSemanticError(this, 'Sequence items must not have preceding content on the same line');\n const indent = atLineStart ? start - lineStart : context.indent;\n let offset = PlainValue.Node.endOfWhiteSpace(src, start + 1);\n let ch = src[offset];\n const inlineComment = ch === '#';\n const comments = [];\n let blankLine = null;\n\n while (ch === '\\n' || ch === '#') {\n if (ch === '#') {\n const end = PlainValue.Node.endOfLine(src, offset + 1);\n comments.push(new PlainValue.Range(offset, end));\n offset = end;\n } else {\n atLineStart = true;\n lineStart = offset + 1;\n const wsEnd = PlainValue.Node.endOfWhiteSpace(src, lineStart);\n\n if (src[wsEnd] === '\\n' && comments.length === 0) {\n blankLine = new BlankLine();\n lineStart = blankLine.parse({\n src\n }, lineStart);\n }\n\n offset = PlainValue.Node.endOfIndent(src, lineStart);\n }\n\n ch = src[offset];\n }\n\n if (PlainValue.Node.nextNodeIsIndented(ch, offset - (lineStart + indent), this.type !== PlainValue.Type.SEQ_ITEM)) {\n this.node = parseNode({\n atLineStart,\n inCollection: false,\n indent,\n lineStart,\n parent: this\n }, offset);\n } else if (ch && lineStart > start + 1) {\n offset = lineStart - 1;\n }\n\n if (this.node) {\n if (blankLine) {\n // Only blank lines preceding non-empty nodes are captured. Note that\n // this means that collection item range start indices do not always\n // increase monotonically. 
-- eemeli/yaml#126\n const items = context.parent.items || context.parent.contents;\n if (items) items.push(blankLine);\n }\n\n if (comments.length) Array.prototype.push.apply(this.props, comments);\n offset = this.node.range.end;\n } else {\n if (inlineComment) {\n const c = comments[0];\n this.props.push(c);\n offset = c.end;\n } else {\n offset = PlainValue.Node.endOfLine(src, start + 1);\n }\n }\n\n const end = this.node ? this.node.valueRange.end : offset;\n this.valueRange = new PlainValue.Range(start, end);\n return offset;\n }\n\n setOrigRanges(cr, offset) {\n offset = super.setOrigRanges(cr, offset);\n return this.node ? this.node.setOrigRanges(cr, offset) : offset;\n }\n\n toString() {\n const {\n context: {\n src\n },\n node,\n range,\n value\n } = this;\n if (value != null) return value;\n const str = node ? src.slice(range.start, node.range.start) + String(node) : src.slice(range.start, range.end);\n return PlainValue.Node.addStringTerminator(src, range.end, str);\n }\n\n}\n\nclass Comment extends PlainValue.Node {\n constructor() {\n super(PlainValue.Type.COMMENT);\n }\n /**\n * Parses a comment line from the source\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this scalar\n */\n\n\n parse(context, start) {\n this.context = context;\n const offset = this.parseComment(start);\n this.range = new PlainValue.Range(start, offset);\n return offset;\n }\n\n}\n\nfunction grabCollectionEndComments(node) {\n let cnode = node;\n\n while (cnode instanceof CollectionItem) cnode = cnode.node;\n\n if (!(cnode instanceof Collection)) return null;\n const len = cnode.items.length;\n let ci = -1;\n\n for (let i = len - 1; i >= 0; --i) {\n const n = cnode.items[i];\n\n if (n.type === PlainValue.Type.COMMENT) {\n // Keep sufficiently indented comments with preceding node\n const {\n indent,\n lineStart\n } = n.context;\n if (indent > 0 && n.range.start >= lineStart + indent) break;\n ci = i;\n } else if (n.type === PlainValue.Type.BLANK_LINE) ci = i;else break;\n }\n\n if (ci === -1) return null;\n const ca = cnode.items.splice(ci, len - ci);\n const prevEnd = ca[0].range.start;\n\n while (true) {\n cnode.range.end = prevEnd;\n if (cnode.valueRange && cnode.valueRange.end > prevEnd) cnode.valueRange.end = prevEnd;\n if (cnode === node) break;\n cnode = cnode.context.parent;\n }\n\n return ca;\n}\nclass Collection extends PlainValue.Node {\n static nextContentHasIndent(src, offset, indent) {\n const lineStart = PlainValue.Node.endOfLine(src, offset) + 1;\n offset = PlainValue.Node.endOfWhiteSpace(src, lineStart);\n const ch = src[offset];\n if (!ch) return false;\n if (offset >= lineStart + indent) return true;\n if (ch !== '#' && ch !== '\\n') return false;\n return Collection.nextContentHasIndent(src, offset, indent);\n }\n\n constructor(firstItem) {\n super(firstItem.type === PlainValue.Type.SEQ_ITEM ? 
PlainValue.Type.SEQ : PlainValue.Type.MAP);\n\n for (let i = firstItem.props.length - 1; i >= 0; --i) {\n if (firstItem.props[i].start < firstItem.context.lineStart) {\n // props on previous line are assumed by the collection\n this.props = firstItem.props.slice(0, i + 1);\n firstItem.props = firstItem.props.slice(i + 1);\n const itemRange = firstItem.props[0] || firstItem.valueRange;\n firstItem.range.start = itemRange.start;\n break;\n }\n }\n\n this.items = [firstItem];\n const ec = grabCollectionEndComments(firstItem);\n if (ec) Array.prototype.push.apply(this.items, ec);\n }\n\n get includesTrailingLines() {\n return this.items.length > 0;\n }\n /**\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n parseNode,\n src\n } = context; // It's easier to recalculate lineStart here rather than tracking down the\n // last context from which to read it -- eemeli/yaml#2\n\n let lineStart = PlainValue.Node.startOfLine(src, start);\n const firstItem = this.items[0]; // First-item context needs to be correct for later comment handling\n // -- eemeli/yaml#17\n\n firstItem.context.parent = this;\n this.valueRange = PlainValue.Range.copy(firstItem.valueRange);\n const indent = firstItem.range.start - firstItem.context.lineStart;\n let offset = start;\n offset = PlainValue.Node.normalizeOffset(src, offset);\n let ch = src[offset];\n let atLineStart = PlainValue.Node.endOfWhiteSpace(src, lineStart) === offset;\n let prevIncludesTrailingLines = false;\n\n while (ch) {\n while (ch === '\\n' || ch === '#') {\n if (atLineStart && ch === '\\n' && !prevIncludesTrailingLines) {\n const blankLine = new BlankLine();\n offset = blankLine.parse({\n src\n }, offset);\n this.valueRange.end = offset;\n\n if (offset >= src.length) {\n ch = null;\n break;\n }\n\n this.items.push(blankLine);\n offset -= 1; // blankLine.parse() consumes terminal newline\n } else if (ch === '#') {\n if (offset < lineStart + indent && !Collection.nextContentHasIndent(src, offset, indent)) {\n return offset;\n }\n\n const comment = new Comment();\n offset = comment.parse({\n indent,\n lineStart,\n src\n }, offset);\n this.items.push(comment);\n this.valueRange.end = offset;\n\n if (offset >= src.length) {\n ch = null;\n break;\n }\n }\n\n lineStart = offset + 1;\n offset = PlainValue.Node.endOfIndent(src, lineStart);\n\n if (PlainValue.Node.atBlank(src, offset)) {\n const wsEnd = PlainValue.Node.endOfWhiteSpace(src, offset);\n const next = src[wsEnd];\n\n if (!next || next === '\\n' || next === '#') {\n offset = wsEnd;\n }\n }\n\n ch = src[offset];\n atLineStart = true;\n }\n\n if (!ch) {\n break;\n }\n\n if (offset !== lineStart + indent && (atLineStart || ch !== ':')) {\n if (offset < lineStart + indent) {\n if (lineStart > start) offset = lineStart;\n break;\n } else if (!this.error) {\n const msg = 'All collection items must start at the same column';\n this.error = new PlainValue.YAMLSyntaxError(this, msg);\n }\n }\n\n if (firstItem.type === PlainValue.Type.SEQ_ITEM) {\n if (ch !== '-') {\n if (lineStart > start) offset = lineStart;\n break;\n }\n } else if (ch === '-' && !this.error) {\n // map key may start with -, as long as it's followed by a non-whitespace char\n const next = src[offset + 1];\n\n if (!next || next === '\\n' || next === '\\t' || next === ' ') {\n const msg = 'A collection cannot be both a mapping and a sequence';\n this.error = new 
PlainValue.YAMLSyntaxError(this, msg);\n }\n }\n\n const node = parseNode({\n atLineStart,\n inCollection: true,\n indent,\n lineStart,\n parent: this\n }, offset);\n if (!node) return offset; // at next document start\n\n this.items.push(node);\n this.valueRange.end = node.valueRange.end;\n offset = PlainValue.Node.normalizeOffset(src, node.range.end);\n ch = src[offset];\n atLineStart = false;\n prevIncludesTrailingLines = node.includesTrailingLines; // Need to reset lineStart and atLineStart here if preceding node's range\n // has advanced to check the current line's indentation level\n // -- eemeli/yaml#10 & eemeli/yaml#38\n\n if (ch) {\n let ls = offset - 1;\n let prev = src[ls];\n\n while (prev === ' ' || prev === '\\t') prev = src[--ls];\n\n if (prev === '\\n') {\n lineStart = ls + 1;\n atLineStart = true;\n }\n }\n\n const ec = grabCollectionEndComments(node);\n if (ec) Array.prototype.push.apply(this.items, ec);\n }\n\n return offset;\n }\n\n setOrigRanges(cr, offset) {\n offset = super.setOrigRanges(cr, offset);\n this.items.forEach(node => {\n offset = node.setOrigRanges(cr, offset);\n });\n return offset;\n }\n\n toString() {\n const {\n context: {\n src\n },\n items,\n range,\n value\n } = this;\n if (value != null) return value;\n let str = src.slice(range.start, items[0].range.start) + String(items[0]);\n\n for (let i = 1; i < items.length; ++i) {\n const item = items[i];\n const {\n atLineStart,\n indent\n } = item.context;\n if (atLineStart) for (let i = 0; i < indent; ++i) str += ' ';\n str += String(item);\n }\n\n return PlainValue.Node.addStringTerminator(src, range.end, str);\n }\n\n}\n\nclass Directive extends PlainValue.Node {\n constructor() {\n super(PlainValue.Type.DIRECTIVE);\n this.name = null;\n }\n\n get parameters() {\n const raw = this.rawValue;\n return raw ? raw.trim().split(/[ \\t]+/) : [];\n }\n\n parseName(start) {\n const {\n src\n } = this.context;\n let offset = start;\n let ch = src[offset];\n\n while (ch && ch !== '\\n' && ch !== '\\t' && ch !== ' ') ch = src[offset += 1];\n\n this.name = src.slice(start, offset);\n return offset;\n }\n\n parseParameters(start) {\n const {\n src\n } = this.context;\n let offset = start;\n let ch = src[offset];\n\n while (ch && ch !== '\\n' && ch !== '#') ch = src[offset += 1];\n\n this.valueRange = new PlainValue.Range(start, offset);\n return offset;\n }\n\n parse(context, start) {\n this.context = context;\n let offset = this.parseName(start + 1);\n offset = this.parseParameters(offset);\n offset = this.parseComment(offset);\n this.range = new PlainValue.Range(start, offset);\n return offset;\n }\n\n}\n\nclass Document extends PlainValue.Node {\n static startCommentOrEndBlankLine(src, start) {\n const offset = PlainValue.Node.endOfWhiteSpace(src, start);\n const ch = src[offset];\n return ch === '#' || ch === '\\n' ? 
offset : start;\n }\n\n constructor() {\n super(PlainValue.Type.DOCUMENT);\n this.directives = null;\n this.contents = null;\n this.directivesEndMarker = null;\n this.documentEndMarker = null;\n }\n\n parseDirectives(start) {\n const {\n src\n } = this.context;\n this.directives = [];\n let atLineStart = true;\n let hasDirectives = false;\n let offset = start;\n\n while (!PlainValue.Node.atDocumentBoundary(src, offset, PlainValue.Char.DIRECTIVES_END)) {\n offset = Document.startCommentOrEndBlankLine(src, offset);\n\n switch (src[offset]) {\n case '\\n':\n if (atLineStart) {\n const blankLine = new BlankLine();\n offset = blankLine.parse({\n src\n }, offset);\n\n if (offset < src.length) {\n this.directives.push(blankLine);\n }\n } else {\n offset += 1;\n atLineStart = true;\n }\n\n break;\n\n case '#':\n {\n const comment = new Comment();\n offset = comment.parse({\n src\n }, offset);\n this.directives.push(comment);\n atLineStart = false;\n }\n break;\n\n case '%':\n {\n const directive = new Directive();\n offset = directive.parse({\n parent: this,\n src\n }, offset);\n this.directives.push(directive);\n hasDirectives = true;\n atLineStart = false;\n }\n break;\n\n default:\n if (hasDirectives) {\n this.error = new PlainValue.YAMLSemanticError(this, 'Missing directives-end indicator line');\n } else if (this.directives.length > 0) {\n this.contents = this.directives;\n this.directives = [];\n }\n\n return offset;\n }\n }\n\n if (src[offset]) {\n this.directivesEndMarker = new PlainValue.Range(offset, offset + 3);\n return offset + 3;\n }\n\n if (hasDirectives) {\n this.error = new PlainValue.YAMLSemanticError(this, 'Missing directives-end indicator line');\n } else if (this.directives.length > 0) {\n this.contents = this.directives;\n this.directives = [];\n }\n\n return offset;\n }\n\n parseContents(start) {\n const {\n parseNode,\n src\n } = this.context;\n if (!this.contents) this.contents = [];\n let lineStart = start;\n\n while (src[lineStart - 1] === '-') lineStart -= 1;\n\n let offset = PlainValue.Node.endOfWhiteSpace(src, start);\n let atLineStart = lineStart === start;\n this.valueRange = new PlainValue.Range(offset);\n\n while (!PlainValue.Node.atDocumentBoundary(src, offset, PlainValue.Char.DOCUMENT_END)) {\n switch (src[offset]) {\n case '\\n':\n if (atLineStart) {\n const blankLine = new BlankLine();\n offset = blankLine.parse({\n src\n }, offset);\n\n if (offset < src.length) {\n this.contents.push(blankLine);\n }\n } else {\n offset += 1;\n atLineStart = true;\n }\n\n lineStart = offset;\n break;\n\n case '#':\n {\n const comment = new Comment();\n offset = comment.parse({\n src\n }, offset);\n this.contents.push(comment);\n atLineStart = false;\n }\n break;\n\n default:\n {\n const iEnd = PlainValue.Node.endOfIndent(src, offset);\n const context = {\n atLineStart,\n indent: -1,\n inFlow: false,\n inCollection: false,\n lineStart,\n parent: this\n };\n const node = parseNode(context, iEnd);\n if (!node) return this.valueRange.end = iEnd; // at next document start\n\n this.contents.push(node);\n offset = node.range.end;\n atLineStart = false;\n const ec = grabCollectionEndComments(node);\n if (ec) Array.prototype.push.apply(this.contents, ec);\n }\n }\n\n offset = Document.startCommentOrEndBlankLine(src, offset);\n }\n\n this.valueRange.end = offset;\n\n if (src[offset]) {\n this.documentEndMarker = new PlainValue.Range(offset, offset + 3);\n offset += 3;\n\n if (src[offset]) {\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n\n if (src[offset] === '#') {\n const 
comment = new Comment();\n offset = comment.parse({\n src\n }, offset);\n this.contents.push(comment);\n }\n\n switch (src[offset]) {\n case '\\n':\n offset += 1;\n break;\n\n case undefined:\n break;\n\n default:\n this.error = new PlainValue.YAMLSyntaxError(this, 'Document end marker line cannot have a non-comment suffix');\n }\n }\n }\n\n return offset;\n }\n /**\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this\n */\n\n\n parse(context, start) {\n context.root = this;\n this.context = context;\n const {\n src\n } = context;\n let offset = src.charCodeAt(start) === 0xfeff ? start + 1 : start; // skip BOM\n\n offset = this.parseDirectives(offset);\n offset = this.parseContents(offset);\n return offset;\n }\n\n setOrigRanges(cr, offset) {\n offset = super.setOrigRanges(cr, offset);\n this.directives.forEach(node => {\n offset = node.setOrigRanges(cr, offset);\n });\n if (this.directivesEndMarker) offset = this.directivesEndMarker.setOrigRange(cr, offset);\n this.contents.forEach(node => {\n offset = node.setOrigRanges(cr, offset);\n });\n if (this.documentEndMarker) offset = this.documentEndMarker.setOrigRange(cr, offset);\n return offset;\n }\n\n toString() {\n const {\n contents,\n directives,\n value\n } = this;\n if (value != null) return value;\n let str = directives.join('');\n\n if (contents.length > 0) {\n if (directives.length > 0 || contents[0].type === PlainValue.Type.COMMENT) str += '---\\n';\n str += contents.join('');\n }\n\n if (str[str.length - 1] !== '\\n') str += '\\n';\n return str;\n }\n\n}\n\nclass Alias extends PlainValue.Node {\n /**\n * Parses an *alias from the source\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this scalar\n */\n parse(context, start) {\n this.context = context;\n const {\n src\n } = context;\n let offset = PlainValue.Node.endOfIdentifier(src, start + 1);\n this.valueRange = new PlainValue.Range(start + 1, offset);\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n offset = this.parseComment(offset);\n return offset;\n }\n\n}\n\nconst Chomp = {\n CLIP: 'CLIP',\n KEEP: 'KEEP',\n STRIP: 'STRIP'\n};\nclass BlockValue extends PlainValue.Node {\n constructor(type, props) {\n super(type, props);\n this.blockIndent = null;\n this.chomping = Chomp.CLIP;\n this.header = null;\n }\n\n get includesTrailingLines() {\n return this.chomping === Chomp.KEEP;\n }\n\n get strValue() {\n if (!this.valueRange || !this.context) return null;\n let {\n start,\n end\n } = this.valueRange;\n const {\n indent,\n src\n } = this.context;\n if (this.valueRange.isEmpty()) return '';\n let lastNewLine = null;\n let ch = src[end - 1];\n\n while (ch === '\\n' || ch === '\\t' || ch === ' ') {\n end -= 1;\n\n if (end <= start) {\n if (this.chomping === Chomp.KEEP) break;else return ''; // probably never happens\n }\n\n if (ch === '\\n') lastNewLine = end;\n ch = src[end - 1];\n }\n\n let keepStart = end + 1;\n\n if (lastNewLine) {\n if (this.chomping === Chomp.KEEP) {\n keepStart = lastNewLine;\n end = this.valueRange.end;\n } else {\n end = lastNewLine;\n }\n }\n\n const bi = indent + this.blockIndent;\n const folded = this.type === PlainValue.Type.BLOCK_FOLDED;\n let atStart = true;\n let str = '';\n let sep = '';\n let prevMoreIndented = false;\n\n for (let i = start; i < end; ++i) {\n for (let j = 0; j < bi; ++j) {\n if (src[i] !== ' ') break;\n i += 1;\n }\n\n const ch = 
src[i];\n\n if (ch === '\\n') {\n if (sep === '\\n') str += '\\n';else sep = '\\n';\n } else {\n const lineEnd = PlainValue.Node.endOfLine(src, i);\n const line = src.slice(i, lineEnd);\n i = lineEnd;\n\n if (folded && (ch === ' ' || ch === '\\t') && i < keepStart) {\n if (sep === ' ') sep = '\\n';else if (!prevMoreIndented && !atStart && sep === '\\n') sep = '\\n\\n';\n str += sep + line; //+ ((lineEnd < end && src[lineEnd]) || '')\n\n sep = lineEnd < end && src[lineEnd] || '';\n prevMoreIndented = true;\n } else {\n str += sep + line;\n sep = folded && i < keepStart ? ' ' : '\\n';\n prevMoreIndented = false;\n }\n\n if (atStart && line !== '') atStart = false;\n }\n }\n\n return this.chomping === Chomp.STRIP ? str : str + '\\n';\n }\n\n parseBlockHeader(start) {\n const {\n src\n } = this.context;\n let offset = start + 1;\n let bi = '';\n\n while (true) {\n const ch = src[offset];\n\n switch (ch) {\n case '-':\n this.chomping = Chomp.STRIP;\n break;\n\n case '+':\n this.chomping = Chomp.KEEP;\n break;\n\n case '0':\n case '1':\n case '2':\n case '3':\n case '4':\n case '5':\n case '6':\n case '7':\n case '8':\n case '9':\n bi += ch;\n break;\n\n default:\n this.blockIndent = Number(bi) || null;\n this.header = new PlainValue.Range(start, offset);\n return offset;\n }\n\n offset += 1;\n }\n }\n\n parseBlockValue(start) {\n const {\n indent,\n src\n } = this.context;\n const explicit = !!this.blockIndent;\n let offset = start;\n let valueEnd = start;\n let minBlockIndent = 1;\n\n for (let ch = src[offset]; ch === '\\n'; ch = src[offset]) {\n offset += 1;\n if (PlainValue.Node.atDocumentBoundary(src, offset)) break;\n const end = PlainValue.Node.endOfBlockIndent(src, indent, offset); // should not include tab?\n\n if (end === null) break;\n const ch = src[end];\n const lineIndent = end - (offset + indent);\n\n if (!this.blockIndent) {\n // no explicit block indent, none yet detected\n if (src[end] !== '\\n') {\n // first line with non-whitespace content\n if (lineIndent < minBlockIndent) {\n const msg = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n this.error = new PlainValue.YAMLSemanticError(this, msg);\n }\n\n this.blockIndent = lineIndent;\n } else if (lineIndent > minBlockIndent) {\n // empty line with more whitespace\n minBlockIndent = lineIndent;\n }\n } else if (ch && ch !== '\\n' && lineIndent < this.blockIndent) {\n if (src[end] === '#') break;\n\n if (!this.error) {\n const src = explicit ? 'explicit indentation indicator' : 'first line';\n const msg = `Block scalars must not be less indented than their ${src}`;\n this.error = new PlainValue.YAMLSemanticError(this, msg);\n }\n }\n\n if (src[end] === '\\n') {\n offset = end;\n } else {\n offset = valueEnd = PlainValue.Node.endOfLine(src, end);\n }\n }\n\n if (this.chomping !== Chomp.KEEP) {\n offset = src[valueEnd] ? 
valueEnd + 1 : valueEnd;\n }\n\n this.valueRange = new PlainValue.Range(start + 1, offset);\n return offset;\n }\n /**\n * Parses a block value from the source\n *\n * Accepted forms are:\n * ```\n * BS\n * block\n * lines\n *\n * BS #comment\n * block\n * lines\n * ```\n * where the block style BS matches the regexp `[|>][-+1-9]*` and block lines\n * are empty or have an indent level greater than `indent`.\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this block\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n src\n } = context;\n let offset = this.parseBlockHeader(start);\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n offset = this.parseComment(offset);\n offset = this.parseBlockValue(offset);\n return offset;\n }\n\n setOrigRanges(cr, offset) {\n offset = super.setOrigRanges(cr, offset);\n return this.header ? this.header.setOrigRange(cr, offset) : offset;\n }\n\n}\n\nclass FlowCollection extends PlainValue.Node {\n constructor(type, props) {\n super(type, props);\n this.items = null;\n }\n\n prevNodeIsJsonLike(idx = this.items.length) {\n const node = this.items[idx - 1];\n return !!node && (node.jsonLike || node.type === PlainValue.Type.COMMENT && this.prevNodeIsJsonLike(idx - 1));\n }\n /**\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n parseNode,\n src\n } = context;\n let {\n indent,\n lineStart\n } = context;\n let char = src[start]; // { or [\n\n this.items = [{\n char,\n offset: start\n }];\n let offset = PlainValue.Node.endOfWhiteSpace(src, start + 1);\n char = src[offset];\n\n while (char && char !== ']' && char !== '}') {\n switch (char) {\n case '\\n':\n {\n lineStart = offset + 1;\n const wsEnd = PlainValue.Node.endOfWhiteSpace(src, lineStart);\n\n if (src[wsEnd] === '\\n') {\n const blankLine = new BlankLine();\n lineStart = blankLine.parse({\n src\n }, lineStart);\n this.items.push(blankLine);\n }\n\n offset = PlainValue.Node.endOfIndent(src, lineStart);\n\n if (offset <= lineStart + indent) {\n char = src[offset];\n\n if (offset < lineStart + indent || char !== ']' && char !== '}') {\n const msg = 'Insufficient indentation in flow collection';\n this.error = new PlainValue.YAMLSemanticError(this, msg);\n }\n }\n }\n break;\n\n case ',':\n {\n this.items.push({\n char,\n offset\n });\n offset += 1;\n }\n break;\n\n case '#':\n {\n const comment = new Comment();\n offset = comment.parse({\n src\n }, offset);\n this.items.push(comment);\n }\n break;\n\n case '?':\n case ':':\n {\n const next = src[offset + 1];\n\n if (next === '\\n' || next === '\\t' || next === ' ' || next === ',' || // in-flow : after JSON-like key does not need to be followed by whitespace\n char === ':' && this.prevNodeIsJsonLike()) {\n this.items.push({\n char,\n offset\n });\n offset += 1;\n break;\n }\n }\n // fallthrough\n\n default:\n {\n const node = parseNode({\n atLineStart: false,\n inCollection: false,\n inFlow: true,\n indent: -1,\n lineStart,\n parent: this\n }, offset);\n\n if (!node) {\n // at next document start\n this.valueRange = new PlainValue.Range(start, offset);\n return offset;\n }\n\n this.items.push(node);\n offset = PlainValue.Node.normalizeOffset(src, node.range.end);\n }\n }\n\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n char = src[offset];\n }\n\n this.valueRange 
= new PlainValue.Range(start, offset + 1);\n\n if (char) {\n this.items.push({\n char,\n offset\n });\n offset = PlainValue.Node.endOfWhiteSpace(src, offset + 1);\n offset = this.parseComment(offset);\n }\n\n return offset;\n }\n\n setOrigRanges(cr, offset) {\n offset = super.setOrigRanges(cr, offset);\n this.items.forEach(node => {\n if (node instanceof PlainValue.Node) {\n offset = node.setOrigRanges(cr, offset);\n } else if (cr.length === 0) {\n node.origOffset = node.offset;\n } else {\n let i = offset;\n\n while (i < cr.length) {\n if (cr[i] > node.offset) break;else ++i;\n }\n\n node.origOffset = node.offset + i;\n offset = i;\n }\n });\n return offset;\n }\n\n toString() {\n const {\n context: {\n src\n },\n items,\n range,\n value\n } = this;\n if (value != null) return value;\n const nodes = items.filter(item => item instanceof PlainValue.Node);\n let str = '';\n let prevEnd = range.start;\n nodes.forEach(node => {\n const prefix = src.slice(prevEnd, node.range.start);\n prevEnd = node.range.end;\n str += prefix + String(node);\n\n if (str[str.length - 1] === '\\n' && src[prevEnd - 1] !== '\\n' && src[prevEnd] === '\\n') {\n // Comment range does not include the terminal newline, but its\n // stringified value does. Without this fix, newlines at comment ends\n // get duplicated.\n prevEnd += 1;\n }\n });\n str += src.slice(prevEnd, range.end);\n return PlainValue.Node.addStringTerminator(src, range.end, str);\n }\n\n}\n\nclass QuoteDouble extends PlainValue.Node {\n static endOfQuote(src, offset) {\n let ch = src[offset];\n\n while (ch && ch !== '\"') {\n offset += ch === '\\\\' ? 2 : 1;\n ch = src[offset];\n }\n\n return offset + 1;\n }\n /**\n * @returns {string | { str: string, errors: YAMLSyntaxError[] }}\n */\n\n\n get strValue() {\n if (!this.valueRange || !this.context) return null;\n const errors = [];\n const {\n start,\n end\n } = this.valueRange;\n const {\n indent,\n src\n } = this.context;\n if (src[end - 1] !== '\"') errors.push(new PlainValue.YAMLSyntaxError(this, 'Missing closing \"quote')); // Using String#replace is too painful with escaped newlines preceded by\n // escaped backslashes; also, this should be faster.\n\n let str = '';\n\n for (let i = start + 1; i < end - 1; ++i) {\n const ch = src[i];\n\n if (ch === '\\n') {\n if (PlainValue.Node.atDocumentBoundary(src, i + 1)) errors.push(new PlainValue.YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values'));\n const {\n fold,\n offset,\n error\n } = PlainValue.Node.foldNewline(src, i, indent);\n str += fold;\n i = offset;\n if (error) errors.push(new PlainValue.YAMLSemanticError(this, 'Multi-line double-quoted string needs to be sufficiently indented'));\n } else if (ch === '\\\\') {\n i += 1;\n\n switch (src[i]) {\n case '0':\n str += '\\0';\n break;\n // null character\n\n case 'a':\n str += '\\x07';\n break;\n // bell character\n\n case 'b':\n str += '\\b';\n break;\n // backspace\n\n case 'e':\n str += '\\x1b';\n break;\n // escape character\n\n case 'f':\n str += '\\f';\n break;\n // form feed\n\n case 'n':\n str += '\\n';\n break;\n // line feed\n\n case 'r':\n str += '\\r';\n break;\n // carriage return\n\n case 't':\n str += '\\t';\n break;\n // horizontal tab\n\n case 'v':\n str += '\\v';\n break;\n // vertical tab\n\n case 'N':\n str += '\\u0085';\n break;\n // Unicode next line\n\n case '_':\n str += '\\u00a0';\n break;\n // Unicode non-breaking space\n\n case 'L':\n str += '\\u2028';\n break;\n // Unicode line separator\n\n case 'P':\n str += '\\u2029';\n 
break;\n // Unicode paragraph separator\n\n case ' ':\n str += ' ';\n break;\n\n case '\"':\n str += '\"';\n break;\n\n case '/':\n str += '/';\n break;\n\n case '\\\\':\n str += '\\\\';\n break;\n\n case '\\t':\n str += '\\t';\n break;\n\n case 'x':\n str += this.parseCharCode(i + 1, 2, errors);\n i += 2;\n break;\n\n case 'u':\n str += this.parseCharCode(i + 1, 4, errors);\n i += 4;\n break;\n\n case 'U':\n str += this.parseCharCode(i + 1, 8, errors);\n i += 8;\n break;\n\n case '\\n':\n // skip escaped newlines, but still trim the following line\n while (src[i + 1] === ' ' || src[i + 1] === '\\t') i += 1;\n\n break;\n\n default:\n errors.push(new PlainValue.YAMLSyntaxError(this, `Invalid escape sequence ${src.substr(i - 1, 2)}`));\n str += '\\\\' + src[i];\n }\n } else if (ch === ' ' || ch === '\\t') {\n // trim trailing whitespace\n const wsStart = i;\n let next = src[i + 1];\n\n while (next === ' ' || next === '\\t') {\n i += 1;\n next = src[i + 1];\n }\n\n if (next !== '\\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch;\n } else {\n str += ch;\n }\n }\n\n return errors.length > 0 ? {\n errors,\n str\n } : str;\n }\n\n parseCharCode(offset, length, errors) {\n const {\n src\n } = this.context;\n const cc = src.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n\n if (isNaN(code)) {\n errors.push(new PlainValue.YAMLSyntaxError(this, `Invalid escape sequence ${src.substr(offset - 2, length + 2)}`));\n return src.substr(offset - 2, length + 2);\n }\n\n return String.fromCodePoint(code);\n }\n /**\n * Parses a \"double quoted\" value from the source\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this scalar\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n src\n } = context;\n let offset = QuoteDouble.endOfQuote(src, start + 1);\n this.valueRange = new PlainValue.Range(start, offset);\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n offset = this.parseComment(offset);\n return offset;\n }\n\n}\n\nclass QuoteSingle extends PlainValue.Node {\n static endOfQuote(src, offset) {\n let ch = src[offset];\n\n while (ch) {\n if (ch === \"'\") {\n if (src[offset + 1] !== \"'\") break;\n ch = src[offset += 2];\n } else {\n ch = src[offset += 1];\n }\n }\n\n return offset + 1;\n }\n /**\n * @returns {string | { str: string, errors: YAMLSyntaxError[] }}\n */\n\n\n get strValue() {\n if (!this.valueRange || !this.context) return null;\n const errors = [];\n const {\n start,\n end\n } = this.valueRange;\n const {\n indent,\n src\n } = this.context;\n if (src[end - 1] !== \"'\") errors.push(new PlainValue.YAMLSyntaxError(this, \"Missing closing 'quote\"));\n let str = '';\n\n for (let i = start + 1; i < end - 1; ++i) {\n const ch = src[i];\n\n if (ch === '\\n') {\n if (PlainValue.Node.atDocumentBoundary(src, i + 1)) errors.push(new PlainValue.YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values'));\n const {\n fold,\n offset,\n error\n } = PlainValue.Node.foldNewline(src, i, indent);\n str += fold;\n i = offset;\n if (error) errors.push(new PlainValue.YAMLSemanticError(this, 'Multi-line single-quoted string needs to be sufficiently indented'));\n } else if (ch === \"'\") {\n str += ch;\n i += 1;\n if (src[i] !== \"'\") errors.push(new PlainValue.YAMLSyntaxError(this, 'Unescaped single quote? 
This should not happen.'));\n } else if (ch === ' ' || ch === '\\t') {\n // trim trailing whitespace\n const wsStart = i;\n let next = src[i + 1];\n\n while (next === ' ' || next === '\\t') {\n i += 1;\n next = src[i + 1];\n }\n\n if (next !== '\\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch;\n } else {\n str += ch;\n }\n }\n\n return errors.length > 0 ? {\n errors,\n str\n } : str;\n }\n /**\n * Parses a 'single quoted' value from the source\n *\n * @param {ParseContext} context\n * @param {number} start - Index of first character\n * @returns {number} - Index of the character after this scalar\n */\n\n\n parse(context, start) {\n this.context = context;\n const {\n src\n } = context;\n let offset = QuoteSingle.endOfQuote(src, start + 1);\n this.valueRange = new PlainValue.Range(start, offset);\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n offset = this.parseComment(offset);\n return offset;\n }\n\n}\n\nfunction createNewNode(type, props) {\n switch (type) {\n case PlainValue.Type.ALIAS:\n return new Alias(type, props);\n\n case PlainValue.Type.BLOCK_FOLDED:\n case PlainValue.Type.BLOCK_LITERAL:\n return new BlockValue(type, props);\n\n case PlainValue.Type.FLOW_MAP:\n case PlainValue.Type.FLOW_SEQ:\n return new FlowCollection(type, props);\n\n case PlainValue.Type.MAP_KEY:\n case PlainValue.Type.MAP_VALUE:\n case PlainValue.Type.SEQ_ITEM:\n return new CollectionItem(type, props);\n\n case PlainValue.Type.COMMENT:\n case PlainValue.Type.PLAIN:\n return new PlainValue.PlainValue(type, props);\n\n case PlainValue.Type.QUOTE_DOUBLE:\n return new QuoteDouble(type, props);\n\n case PlainValue.Type.QUOTE_SINGLE:\n return new QuoteSingle(type, props);\n\n /* istanbul ignore next */\n\n default:\n return null;\n // should never happen\n }\n}\n/**\n * @param {boolean} atLineStart - Node starts at beginning of line\n * @param {boolean} inFlow - true if currently in a flow context\n * @param {boolean} inCollection - true if currently in a collection context\n * @param {number} indent - Current level of indentation\n * @param {number} lineStart - Start of the current line\n * @param {Node} parent - The parent of the node\n * @param {string} src - Source of the YAML document\n */\n\n\nclass ParseContext {\n static parseType(src, offset, inFlow) {\n switch (src[offset]) {\n case '*':\n return PlainValue.Type.ALIAS;\n\n case '>':\n return PlainValue.Type.BLOCK_FOLDED;\n\n case '|':\n return PlainValue.Type.BLOCK_LITERAL;\n\n case '{':\n return PlainValue.Type.FLOW_MAP;\n\n case '[':\n return PlainValue.Type.FLOW_SEQ;\n\n case '?':\n return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.MAP_KEY : PlainValue.Type.PLAIN;\n\n case ':':\n return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.MAP_VALUE : PlainValue.Type.PLAIN;\n\n case '-':\n return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? 
PlainValue.Type.SEQ_ITEM : PlainValue.Type.PLAIN;\n\n case '\"':\n return PlainValue.Type.QUOTE_DOUBLE;\n\n case \"'\":\n return PlainValue.Type.QUOTE_SINGLE;\n\n default:\n return PlainValue.Type.PLAIN;\n }\n }\n\n constructor(orig = {}, {\n atLineStart,\n inCollection,\n inFlow,\n indent,\n lineStart,\n parent\n } = {}) {\n PlainValue._defineProperty(this, \"parseNode\", (overlay, start) => {\n if (PlainValue.Node.atDocumentBoundary(this.src, start)) return null;\n const context = new ParseContext(this, overlay);\n const {\n props,\n type,\n valueStart\n } = context.parseProps(start);\n const node = createNewNode(type, props);\n let offset = node.parse(context, valueStart);\n node.range = new PlainValue.Range(start, offset);\n /* istanbul ignore if */\n\n if (offset <= start) {\n // This should never happen, but if it does, let's make sure to at least\n // step one character forward to avoid a busy loop.\n node.error = new Error(`Node#parse consumed no characters`);\n node.error.parseEnd = offset;\n node.error.source = node;\n node.range.end = start + 1;\n }\n\n if (context.nodeStartsCollection(node)) {\n if (!node.error && !context.atLineStart && context.parent.type === PlainValue.Type.DOCUMENT) {\n node.error = new PlainValue.YAMLSyntaxError(node, 'Block collection must not have preceding content here (e.g. directives-end indicator)');\n }\n\n const collection = new Collection(node);\n offset = collection.parse(new ParseContext(context), offset);\n collection.range = new PlainValue.Range(start, offset);\n return collection;\n }\n\n return node;\n });\n\n this.atLineStart = atLineStart != null ? atLineStart : orig.atLineStart || false;\n this.inCollection = inCollection != null ? inCollection : orig.inCollection || false;\n this.inFlow = inFlow != null ? inFlow : orig.inFlow || false;\n this.indent = indent != null ? indent : orig.indent;\n this.lineStart = lineStart != null ? lineStart : orig.lineStart;\n this.parent = parent != null ? parent : orig.parent || {};\n this.root = orig.root;\n this.src = orig.src;\n }\n\n nodeStartsCollection(node) {\n const {\n inCollection,\n inFlow,\n src\n } = this;\n if (inCollection || inFlow) return false;\n if (node instanceof CollectionItem) return true; // check for implicit key\n\n let offset = node.range.end;\n if (src[offset] === '\\n' || src[offset - 1] === '\\n') return false;\n offset = PlainValue.Node.endOfWhiteSpace(src, offset);\n return src[offset] === ':';\n } // Anchor and tag are before type, which determines the node implementation\n // class; hence this intermediate step.\n\n\n parseProps(offset) {\n const {\n inFlow,\n parent,\n src\n } = this;\n const props = [];\n let lineHasProps = false;\n offset = this.atLineStart ? 
PlainValue.Node.endOfIndent(src, offset) : PlainValue.Node.endOfWhiteSpace(src, offset);\n let ch = src[offset];\n\n while (ch === PlainValue.Char.ANCHOR || ch === PlainValue.Char.COMMENT || ch === PlainValue.Char.TAG || ch === '\\n') {\n if (ch === '\\n') {\n const lineStart = offset + 1;\n const inEnd = PlainValue.Node.endOfIndent(src, lineStart);\n const indentDiff = inEnd - (lineStart + this.indent);\n const noIndicatorAsIndent = parent.type === PlainValue.Type.SEQ_ITEM && parent.context.atLineStart;\n if (!PlainValue.Node.nextNodeIsIndented(src[inEnd], indentDiff, !noIndicatorAsIndent)) break;\n this.atLineStart = true;\n this.lineStart = lineStart;\n lineHasProps = false;\n offset = inEnd;\n } else if (ch === PlainValue.Char.COMMENT) {\n const end = PlainValue.Node.endOfLine(src, offset + 1);\n props.push(new PlainValue.Range(offset, end));\n offset = end;\n } else {\n let end = PlainValue.Node.endOfIdentifier(src, offset + 1);\n\n if (ch === PlainValue.Char.TAG && src[end] === ',' && /^[a-zA-Z0-9-]+\\.[a-zA-Z0-9-]+,\\d\\d\\d\\d(-\\d\\d){0,2}\\/\\S/.test(src.slice(offset + 1, end + 13))) {\n // Let's presume we're dealing with a YAML 1.0 domain tag here, rather\n // than an empty but 'foo.bar' private-tagged node in a flow collection\n // followed without whitespace by a plain string starting with a year\n // or date divided by something.\n end = PlainValue.Node.endOfIdentifier(src, end + 5);\n }\n\n props.push(new PlainValue.Range(offset, end));\n lineHasProps = true;\n offset = PlainValue.Node.endOfWhiteSpace(src, end);\n }\n\n ch = src[offset];\n } // '- &a : b' has an anchor on an empty node\n\n\n if (lineHasProps && ch === ':' && PlainValue.Node.atBlank(src, offset + 1, true)) offset -= 1;\n const type = ParseContext.parseType(src, offset, inFlow);\n return {\n props,\n type,\n valueStart: offset\n };\n }\n /**\n * Parses a node from the source\n * @param {ParseContext} overlay\n * @param {number} start - Index of first non-whitespace character for the node\n * @returns {?Node} - null if at a document boundary\n */\n\n\n}\n\n// Published as 'yaml/parse-cst'\nfunction parse(src) {\n const cr = [];\n\n if (src.indexOf('\\r') !== -1) {\n src = src.replace(/\\r\\n?/g, (match, offset) => {\n if (match.length > 1) cr.push(offset);\n return '\\n';\n });\n }\n\n const documents = [];\n let offset = 0;\n\n do {\n const doc = new Document();\n const context = new ParseContext({\n src\n });\n offset = doc.parse(context, offset);\n documents.push(doc);\n } while (offset < src.length);\n\n documents.setOrigRanges = () => {\n if (cr.length === 0) return false;\n\n for (let i = 1; i < cr.length; ++i) cr[i] -= i;\n\n let crOffset = 0;\n\n for (let i = 0; i < documents.length; ++i) {\n crOffset = documents[i].setOrigRanges(cr, crOffset);\n }\n\n cr.splice(0, cr.length);\n return true;\n };\n\n documents.toString = () => documents.join('...\\n');\n\n return documents;\n}\n\nexports.parse = parse;\n","'use strict';\n\nvar PlainValue = require('./PlainValue-ec8e588e.js');\n\nfunction addCommentBefore(str, indent, comment) {\n if (!comment) return str;\n const cc = comment.replace(/[\\s\\S]^/gm, `$&${indent}#`);\n return `#${cc}\\n${indent}${str}`;\n}\nfunction addComment(str, indent, comment) {\n return !comment ? str : comment.indexOf('\\n') === -1 ? 
`${str} #${comment}` : `${str}\\n` + comment.replace(/^/gm, `${indent || ''}#`);\n}\n\nclass Node {}\n\nfunction toJSON(value, arg, ctx) {\n if (Array.isArray(value)) return value.map((v, i) => toJSON(v, String(i), ctx));\n\n if (value && typeof value.toJSON === 'function') {\n const anchor = ctx && ctx.anchors && ctx.anchors.get(value);\n if (anchor) ctx.onCreate = res => {\n anchor.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (anchor && ctx.onCreate) ctx.onCreate(res);\n return res;\n }\n\n if ((!ctx || !ctx.keep) && typeof value === 'bigint') return Number(value);\n return value;\n}\n\nclass Scalar extends Node {\n constructor(value) {\n super();\n this.value = value;\n }\n\n toJSON(arg, ctx) {\n return ctx && ctx.keep ? this.value : toJSON(this.value, arg, ctx);\n }\n\n toString() {\n return String(this.value);\n }\n\n}\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n const o = Number.isInteger(k) && k >= 0 ? [] : {};\n o[k] = v;\n v = o;\n }\n\n return schema.createNode(v, false);\n} // null, undefined, or an empty non-string iterable (e.g. [])\n\n\nconst isEmptyPath = path => path == null || typeof path === 'object' && path[Symbol.iterator]().next().done;\nclass Collection extends Node {\n constructor(schema) {\n super();\n\n PlainValue._defineProperty(this, \"items\", []);\n\n this.schema = schema;\n }\n\n addIn(path, value) {\n if (isEmptyPath(path)) this.add(value);else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (node instanceof Collection) node.addIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n\n deleteIn([key, ...rest]) {\n if (rest.length === 0) return this.delete(key);\n const node = this.get(key, true);\n if (node instanceof Collection) return node.deleteIn(rest);else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n\n getIn([key, ...rest], keepScalar) {\n const node = this.get(key, true);\n if (rest.length === 0) return !keepScalar && node instanceof Scalar ? node.value : node;else return node instanceof Collection ? node.getIn(rest, keepScalar) : undefined;\n }\n\n hasAllNullValues() {\n return this.items.every(node => {\n if (!node || node.type !== 'PAIR') return false;\n const n = node.value;\n return n == null || n instanceof Scalar && n.value == null && !n.commentBefore && !n.comment && !n.tag;\n });\n }\n\n hasIn([key, ...rest]) {\n if (rest.length === 0) return this.has(key);\n const node = this.get(key, true);\n return node instanceof Collection ? node.hasIn(rest) : false;\n }\n\n setIn([key, ...rest], value) {\n if (rest.length === 0) {\n this.set(key, value);\n } else {\n const node = this.get(key, true);\n if (node instanceof Collection) node.setIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`);\n }\n } // overridden in implementations\n\n /* istanbul ignore next */\n\n\n toJSON() {\n return null;\n }\n\n toString(ctx, {\n blockItem,\n flowChars,\n isMap,\n itemIndent\n }, onComment, onChompKeep) {\n const {\n indent,\n indentStep,\n stringify\n } = ctx;\n const inFlow = this.type === PlainValue.Type.FLOW_MAP || this.type === PlainValue.Type.FLOW_SEQ || ctx.inFlow;\n if (inFlow) itemIndent += indentStep;\n const allNullValues = isMap && this.hasAllNullValues();\n ctx = Object.assign({}, ctx, {\n allNullValues,\n indent: itemIndent,\n inFlow,\n type: null\n });\n let chompKeep = false;\n let hasItemWithNewLine = false;\n const nodes = this.items.reduce((nodes, item, i) => {\n let comment;\n\n if (item) {\n if (!chompKeep && item.spaceBefore) nodes.push({\n type: 'comment',\n str: ''\n });\n if (item.commentBefore) item.commentBefore.match(/^.*$/gm).forEach(line => {\n nodes.push({\n type: 'comment',\n str: `#${line}`\n });\n });\n if (item.comment) comment = item.comment;\n if (inFlow && (!chompKeep && item.spaceBefore || item.commentBefore || item.comment || item.key && (item.key.commentBefore || item.key.comment) || item.value && (item.value.commentBefore || item.value.comment))) hasItemWithNewLine = true;\n }\n\n chompKeep = false;\n let str = stringify(item, ctx, () => comment = null, () => chompKeep = true);\n if (inFlow && !hasItemWithNewLine && str.includes('\\n')) hasItemWithNewLine = true;\n if (inFlow && i < this.items.length - 1) str += ',';\n str = addComment(str, itemIndent, comment);\n if (chompKeep && (comment || inFlow)) chompKeep = false;\n nodes.push({\n type: 'item',\n str\n });\n return nodes;\n }, []);\n let str;\n\n if (nodes.length === 0) {\n str = flowChars.start + flowChars.end;\n } else if (inFlow) {\n const {\n start,\n end\n } = flowChars;\n const strings = nodes.map(n => n.str);\n\n if (hasItemWithNewLine || strings.reduce((sum, str) => sum + str.length + 2, 2) > Collection.maxFlowStringSingleLineLength) {\n str = start;\n\n for (const s of strings) {\n str += s ? `\\n${indentStep}${indent}${s}` : '\\n';\n }\n\n str += `\\n${indent}${end}`;\n } else {\n str = `${start} ${strings.join(' ')} ${end}`;\n }\n } else {\n const strings = nodes.map(blockItem);\n str = strings.shift();\n\n for (const s of strings) str += s ? `\\n${indent}${s}` : '\\n';\n }\n\n if (this.comment) {\n str += '\\n' + this.comment.replace(/^/gm, `${indent}#`);\n if (onComment) onComment();\n } else if (chompKeep && onChompKeep) onChompKeep();\n\n return str;\n }\n\n}\n\nPlainValue._defineProperty(Collection, \"maxFlowStringSingleLineLength\", 60);\n\nfunction asItemIndex(key) {\n let idx = key instanceof Scalar ? key.value : key;\n if (idx && typeof idx === 'string') idx = Number(idx);\n return Number.isInteger(idx) && idx >= 0 ? idx : null;\n}\n\nclass YAMLSeq extends Collection {\n add(value) {\n this.items.push(value);\n }\n\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number') return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number') return undefined;\n const it = this.items[idx];\n return !keepScalar && it instanceof Scalar ? 
it.value : it;\n }\n\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number') throw new Error(`Expected a valid index, not ${key}.`);\n this.items[idx] = value;\n }\n\n toJSON(_, ctx) {\n const seq = [];\n if (ctx && ctx.onCreate) ctx.onCreate(seq);\n let i = 0;\n\n for (const item of this.items) seq.push(toJSON(item, String(i++), ctx));\n\n return seq;\n }\n\n toString(ctx, onComment, onChompKeep) {\n if (!ctx) return JSON.stringify(this);\n return super.toString(ctx, {\n blockItem: n => n.type === 'comment' ? n.str : `- ${n.str}`,\n flowChars: {\n start: '[',\n end: ']'\n },\n isMap: false,\n itemIndent: (ctx.indent || '') + ' '\n }, onComment, onChompKeep);\n }\n\n}\n\nconst stringifyKey = (key, jsKey, ctx) => {\n if (jsKey === null) return '';\n if (typeof jsKey !== 'object') return String(jsKey);\n if (key instanceof Node && ctx && ctx.doc) return key.toString({\n anchors: {},\n doc: ctx.doc,\n indent: '',\n indentStep: ctx.indentStep,\n inFlow: true,\n inStringifyKey: true,\n stringify: ctx.stringify\n });\n return JSON.stringify(jsKey);\n};\n\nclass Pair extends Node {\n constructor(key, value = null) {\n super();\n this.key = key;\n this.value = value;\n this.type = Pair.Type.PAIR;\n }\n\n get commentBefore() {\n return this.key instanceof Node ? this.key.commentBefore : undefined;\n }\n\n set commentBefore(cb) {\n if (this.key == null) this.key = new Scalar(null);\n if (this.key instanceof Node) this.key.commentBefore = cb;else {\n const msg = 'Pair.commentBefore is an alias for Pair.key.commentBefore. To set it, the key must be a Node.';\n throw new Error(msg);\n }\n }\n\n addToJSMap(ctx, map) {\n const key = toJSON(this.key, '', ctx);\n\n if (map instanceof Map) {\n const value = toJSON(this.value, key, ctx);\n map.set(key, value);\n } else if (map instanceof Set) {\n map.add(key);\n } else {\n const stringKey = stringifyKey(this.key, key, ctx);\n map[stringKey] = toJSON(this.value, stringKey, ctx);\n }\n\n return map;\n }\n\n toJSON(_, ctx) {\n const pair = ctx && ctx.mapAsMap ? new Map() : {};\n return this.addToJSMap(ctx, pair);\n }\n\n toString(ctx, onComment, onChompKeep) {\n if (!ctx || !ctx.doc) return JSON.stringify(this);\n const {\n indent: indentSize,\n indentSeq,\n simpleKeys\n } = ctx.doc.options;\n let {\n key,\n value\n } = this;\n let keyComment = key instanceof Node && key.comment;\n\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n\n if (key instanceof Collection) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n\n const explicitKey = !simpleKeys && (!key || keyComment || key instanceof Collection || key.type === PlainValue.Type.BLOCK_FOLDED || key.type === PlainValue.Type.BLOCK_LITERAL);\n const {\n doc,\n indent,\n indentStep,\n stringify\n } = ctx;\n ctx = Object.assign({}, ctx, {\n implicitKey: !explicitKey,\n indent: indent + indentStep\n });\n let chompKeep = false;\n let str = stringify(key, ctx, () => keyComment = null, () => chompKeep = true);\n str = addComment(str, ctx.indent, keyComment);\n\n if (ctx.allNullValues && !simpleKeys) {\n if (this.comment) {\n str = addComment(str, ctx.indent, this.comment);\n if (onComment) onComment();\n } else if (chompKeep && !keyComment && onChompKeep) onChompKeep();\n\n return ctx.inFlow ? str : `? ${str}`;\n }\n\n str = explicitKey ? `? 
${str}\\n${indent}:` : `${str}:`;\n\n if (this.comment) {\n // expected (but not strictly required) to be a single-line comment\n str = addComment(str, ctx.indent, this.comment);\n if (onComment) onComment();\n }\n\n let vcb = '';\n let valueComment = null;\n\n if (value instanceof Node) {\n if (value.spaceBefore) vcb = '\\n';\n\n if (value.commentBefore) {\n const cs = value.commentBefore.replace(/^/gm, `${ctx.indent}#`);\n vcb += `\\n${cs}`;\n }\n\n valueComment = value.comment;\n } else if (value && typeof value === 'object') {\n value = doc.schema.createNode(value, true);\n }\n\n ctx.implicitKey = false;\n if (!explicitKey && !this.comment && value instanceof Scalar) ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n\n if (!indentSeq && indentSize >= 2 && !ctx.inFlow && !explicitKey && value instanceof YAMLSeq && value.type !== PlainValue.Type.FLOW_SEQ && !value.tag && !doc.anchors.getName(value)) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substr(2);\n }\n\n const valueStr = stringify(value, ctx, () => valueComment = null, () => chompKeep = true);\n let ws = ' ';\n\n if (vcb || this.comment) {\n ws = `${vcb}\\n${ctx.indent}`;\n } else if (!explicitKey && value instanceof Collection) {\n const flow = valueStr[0] === '[' || valueStr[0] === '{';\n if (!flow || valueStr.includes('\\n')) ws = `\\n${ctx.indent}`;\n }\n\n if (chompKeep && !valueComment && onChompKeep) onChompKeep();\n return addComment(str + ws + valueStr, ctx.indent, valueComment);\n }\n\n}\n\nPlainValue._defineProperty(Pair, \"Type\", {\n PAIR: 'PAIR',\n MERGE_PAIR: 'MERGE_PAIR'\n});\n\nconst getAliasCount = (node, anchors) => {\n if (node instanceof Alias) {\n const anchor = anchors.get(node.source);\n return anchor.count * anchor.aliasCount;\n } else if (node instanceof Collection) {\n let count = 0;\n\n for (const item of node.items) {\n const c = getAliasCount(item, anchors);\n if (c > count) count = c;\n }\n\n return count;\n } else if (node instanceof Pair) {\n const kc = getAliasCount(node.key, anchors);\n const vc = getAliasCount(node.value, anchors);\n return Math.max(kc, vc);\n }\n\n return 1;\n};\n\nclass Alias extends Node {\n static stringify({\n range,\n source\n }, {\n anchors,\n doc,\n implicitKey,\n inStringifyKey\n }) {\n let anchor = Object.keys(anchors).find(a => anchors[a] === source);\n if (!anchor && inStringifyKey) anchor = doc.anchors.getName(source) || doc.anchors.newName();\n if (anchor) return `*${anchor}${implicitKey ? ' ' : ''}`;\n const msg = doc.anchors.getName(source) ? 
'Alias node must be after source node' : 'Source node not found for alias node';\n throw new Error(`${msg} [${range}]`);\n }\n\n constructor(source) {\n super();\n this.source = source;\n this.type = PlainValue.Type.ALIAS;\n }\n\n set tag(t) {\n throw new Error('Alias nodes cannot have tags');\n }\n\n toJSON(arg, ctx) {\n if (!ctx) return toJSON(this.source, arg, ctx);\n const {\n anchors,\n maxAliasCount\n } = ctx;\n const anchor = anchors.get(this.source);\n /* istanbul ignore if */\n\n if (!anchor || anchor.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n if (this.cstNode) throw new PlainValue.YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg);\n }\n\n if (maxAliasCount >= 0) {\n anchor.count += 1;\n if (anchor.aliasCount === 0) anchor.aliasCount = getAliasCount(this.source, anchors);\n\n if (anchor.count * anchor.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n if (this.cstNode) throw new PlainValue.YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg);\n }\n }\n\n return anchor.res;\n } // Only called when stringifying an alias mapping key while constructing\n // Object output.\n\n\n toString(ctx) {\n return Alias.stringify(this, ctx);\n }\n\n}\n\nPlainValue._defineProperty(Alias, \"default\", true);\n\nfunction findPair(items, key) {\n const k = key instanceof Scalar ? key.value : key;\n\n for (const it of items) {\n if (it instanceof Pair) {\n if (it.key === key || it.key === k) return it;\n if (it.key && it.key.value === k) return it;\n }\n }\n\n return undefined;\n}\nclass YAMLMap extends Collection {\n add(pair, overwrite) {\n if (!pair) pair = new Pair(pair);else if (!(pair instanceof Pair)) pair = new Pair(pair.key || pair, pair.value);\n const prev = findPair(this.items, pair.key);\n const sortEntries = this.schema && this.schema.sortMapEntries;\n\n if (prev) {\n if (overwrite) prev.value = pair.value;else throw new Error(`Key ${pair.key} already set`);\n } else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(pair, item) < 0);\n if (i === -1) this.items.push(pair);else this.items.splice(i, 0, pair);\n } else {\n this.items.push(pair);\n }\n }\n\n delete(key) {\n const it = findPair(this.items, key);\n if (!it) return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it && it.value;\n return !keepScalar && node instanceof Scalar ? node.value : node;\n }\n\n has(key) {\n return !!findPair(this.items, key);\n }\n\n set(key, value) {\n this.add(new Pair(key, value), true);\n }\n /**\n * @param {*} arg ignored\n * @param {*} ctx Conversion context, originally set in Document#toJSON()\n * @param {Class} Type If set, forces the returned collection type\n * @returns {*} Instance of Type, Map, or Object\n */\n\n\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx && ctx.mapAsMap ? 
new Map() : {};\n if (ctx && ctx.onCreate) ctx.onCreate(map);\n\n for (const item of this.items) item.addToJSMap(ctx, map);\n\n return map;\n }\n\n toString(ctx, onComment, onChompKeep) {\n if (!ctx) return JSON.stringify(this);\n\n for (const item of this.items) {\n if (!(item instanceof Pair)) throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n\n return super.toString(ctx, {\n blockItem: n => n.str,\n flowChars: {\n start: '{',\n end: '}'\n },\n isMap: true,\n itemIndent: ctx.indent || ''\n }, onComment, onChompKeep);\n }\n\n}\n\nconst MERGE_KEY = '<<';\nclass Merge extends Pair {\n constructor(pair) {\n if (pair instanceof Pair) {\n let seq = pair.value;\n\n if (!(seq instanceof YAMLSeq)) {\n seq = new YAMLSeq();\n seq.items.push(pair.value);\n seq.range = pair.value.range;\n }\n\n super(pair.key, seq);\n this.range = pair.range;\n } else {\n super(new Scalar(MERGE_KEY), new YAMLSeq());\n }\n\n this.type = Pair.Type.MERGE_PAIR;\n } // If the value associated with a merge key is a single mapping node, each of\n // its key/value pairs is inserted into the current mapping, unless the key\n // already exists in it. If the value associated with the merge key is a\n // sequence, then this sequence is expected to contain mapping nodes and each\n // of these nodes is merged in turn according to its order in the sequence.\n // Keys in mapping nodes earlier in the sequence override keys specified in\n // later mapping nodes. -- http://yaml.org/type/merge.html\n\n\n addToJSMap(ctx, map) {\n for (const {\n source\n } of this.value.items) {\n if (!(source instanceof YAMLMap)) throw new Error('Merge sources must be maps');\n const srcMap = source.toJSON(null, ctx, Map);\n\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key)) map.set(key, value);\n } else if (map instanceof Set) {\n map.add(key);\n } else {\n if (!Object.prototype.hasOwnProperty.call(map, key)) map[key] = value;\n }\n }\n }\n\n return map;\n }\n\n toString(ctx, onComment) {\n const seq = this.value;\n if (seq.items.length > 1) return super.toString(ctx, onComment);\n this.value = seq.items[0];\n const str = super.toString(ctx, onComment);\n this.value = seq;\n return str;\n }\n\n}\n\nconst binaryOptions = {\n defaultType: PlainValue.Type.BLOCK_LITERAL,\n lineWidth: 76\n};\nconst boolOptions = {\n trueStr: 'true',\n falseStr: 'false'\n};\nconst intOptions = {\n asBigInt: false\n};\nconst nullOptions = {\n nullStr: 'null'\n};\nconst strOptions = {\n defaultType: PlainValue.Type.PLAIN,\n doubleQuoted: {\n jsonEncoding: false,\n minMultiLineLength: 40\n },\n fold: {\n lineWidth: 80,\n minContentWidth: 20\n }\n};\n\nfunction resolveScalar(str, tags, scalarFallback) {\n for (const {\n format,\n test,\n resolve\n } of tags) {\n if (test) {\n const match = str.match(test);\n\n if (match) {\n let res = resolve.apply(null, match);\n if (!(res instanceof Scalar)) res = new Scalar(res);\n if (format) res.format = format;\n return res;\n }\n }\n }\n\n if (scalarFallback) str = scalarFallback(str);\n return new Scalar(str);\n}\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted'; // presumes i+1 is at the start of a line\n// returns index of last newline in more-indented block\n\nconst consumeMoreIndentedLines = (text, i) => {\n let ch = text[i + 1];\n\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[i += 1];\n } while (ch && ch !== '\\n');\n\n ch = text[i + 1];\n }\n\n return i;\n};\n/**\n * Tries to keep input at up to 
`lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are\n * terminated with `\\n` and started with `indent`.\n *\n * @param {string} text\n * @param {string} indent\n * @param {string} [mode='flow'] `'block'` prevents more-indented lines\n * from being folded; `'quoted'` allows for `\\` escapes, including escaped\n * newlines\n * @param {Object} options\n * @param {number} [options.indentAtStart] Accounts for leading contents on\n * the first line, defaulting to `indent.length`\n * @param {number} [options.lineWidth=80]\n * @param {number} [options.minContentWidth=20] Allow highly indented lines to\n * stretch the line width\n * @param {function} options.onFold Called once if the text is folded\n * @param {function} options.onFold Called once if any line of text exceeds\n * lineWidth characters\n */\n\n\nfunction foldFlowLines(text, indent, mode, {\n indentAtStart,\n lineWidth = 80,\n minContentWidth = 20,\n onFold,\n onOverflow\n}) {\n if (!lineWidth || lineWidth < 0) return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep) return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - (typeof indentAtStart === 'number' ? indentAtStart : indent.length);\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1) end = i + endStep;\n }\n\n for (let ch; ch = text[i += 1];) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n\n case 'u':\n i += 5;\n break;\n\n case 'U':\n i += 9;\n break;\n\n default:\n i += 1;\n }\n }\n\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK) i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n } else {\n if (ch === ' ' && prev && prev !== ' ' && prev !== '\\n' && prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t') split = i;\n }\n\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n } else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[i += 1];\n overflow = true;\n } // i - 2 accounts for not-dropped last char + newline-escaping \\\n\n\n folds.push(i - 2);\n escapedFolds[i - 2] = true;\n end = i - 2 + endStep;\n split = undefined;\n } else {\n overflow = true;\n }\n }\n }\n\n prev = ch;\n }\n\n if (overflow && onOverflow) onOverflow();\n if (folds.length === 0) return text;\n if (onFold) onFold();\n let res = text.slice(0, folds[0]);\n\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (mode === FOLD_QUOTED && escapedFolds[fold]) res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n\n return res;\n}\n\nconst getFoldOptions = ({\n indentAtStart\n}) => indentAtStart ? 
Object.assign({\n indentAtStart\n}, strOptions.fold) : strOptions.fold; // Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\n\n\nconst containsDocumentMarker = str => /^(%|---|\\.\\.\\.)/m.test(str);\n\nfunction lineLengthOverLimit(str, limit) {\n const strLen = str.length;\n if (strLen <= limit) return false;\n\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit) return true;\n start = i + 1;\n if (strLen - start <= limit) return false;\n }\n }\n\n return true;\n}\n\nfunction doubleQuotedString(value, ctx) {\n const {\n implicitKey\n } = ctx;\n const {\n jsonEncoding,\n minMultiLineLength\n } = strOptions.doubleQuoted;\n const json = JSON.stringify(value);\n if (jsonEncoding) return json;\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n let str = '';\n let start = 0;\n\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n\n if (ch === '\\\\') switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n\n case '0007':\n str += '\\\\a';\n break;\n\n case '000b':\n str += '\\\\v';\n break;\n\n case '001b':\n str += '\\\\e';\n break;\n\n case '0085':\n str += '\\\\N';\n break;\n\n case '00a0':\n str += '\\\\_';\n break;\n\n case '2028':\n str += '\\\\L';\n break;\n\n case '2029':\n str += '\\\\P';\n break;\n\n default:\n if (code.substr(0, 2) === '00') str += '\\\\x' + code.substr(2);else str += json.substr(i, 6);\n }\n\n i += 5;\n start = i + 1;\n }\n break;\n\n case 'n':\n if (implicitKey || json[i + 2] === '\"' || json.length < minMultiLineLength) {\n i += 1;\n } else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n\n while (json[i + 2] === '\\\\' && json[i + 3] === 'n' && json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n\n str += indent; // space after newline needs to be escaped to not be folded\n\n if (json[i + 2] === ' ') str += '\\\\';\n i += 1;\n start = i + 1;\n }\n\n break;\n\n default:\n i += 1;\n }\n }\n\n str = start ? str + json.slice(start) : json;\n return implicitKey ? str : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));\n}\n\nfunction singleQuotedString(value, ctx) {\n if (ctx.implicitKey) {\n if (/\\n/.test(value)) return doubleQuotedString(value, ctx);\n } else {\n // single quoted string can't have leading or trailing whitespace around newline\n if (/[ \\t]\\n|\\n[ \\t]/.test(value)) return doubleQuotedString(value, ctx);\n }\n\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey ? res : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));\n}\n\nfunction blockString({\n comment,\n type,\n value\n}, ctx, onComment, onChompKeep) {\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (/\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return doubleQuotedString(value, ctx);\n }\n\n const indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const indentSize = indent ? 
'2' : '1'; // root is at -1\n\n const literal = type === PlainValue.Type.BLOCK_FOLDED ? false : type === PlainValue.Type.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, strOptions.fold.lineWidth - indent.length);\n let header = literal ? '|' : '>';\n if (!value) return header + '\\n';\n let wsStart = '';\n let wsEnd = '';\n value = value.replace(/[\\n\\t ]*$/, ws => {\n const n = ws.indexOf('\\n');\n\n if (n === -1) {\n header += '-'; // strip\n } else if (value === ws || n !== ws.length - 1) {\n header += '+'; // keep\n\n if (onChompKeep) onChompKeep();\n }\n\n wsEnd = ws.replace(/\\n$/, '');\n return '';\n }).replace(/^[\\n ]*/, ws => {\n if (ws.indexOf(' ') !== -1) header += indentSize;\n const m = ws.match(/ +$/);\n\n if (m) {\n wsStart = ws.slice(0, -m[0].length);\n return m[0];\n } else {\n wsStart = ws;\n return '';\n }\n });\n if (wsEnd) wsEnd = wsEnd.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n if (wsStart) wsStart = wsStart.replace(/\\n+/g, `$&${indent}`);\n\n if (comment) {\n header += ' #' + comment.replace(/ ?[\\r\\n]+/g, ' ');\n if (onComment) onComment();\n }\n\n if (!value) return `${header}${indentSize}\\n${indent}${wsEnd}`;\n\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${wsStart}${value}${wsEnd}`;\n }\n\n value = value.replace(/\\n+/g, '\\n$&').replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ ind.line ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines(`${wsStart}${value}${wsEnd}`, indent, FOLD_BLOCK, strOptions.fold);\n return `${header}\\n${indent}${body}`;\n}\n\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const {\n comment,\n type,\n value\n } = item;\n const {\n actualString,\n implicitKey,\n indent,\n inFlow\n } = ctx;\n\n if (implicitKey && /[\\n[\\]{},]/.test(value) || inFlow && /[[\\]{},]/.test(value)) {\n return doubleQuotedString(value, ctx);\n }\n\n if (!value || /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || value.indexOf('\\n') === -1 ? value.indexOf('\"') !== -1 && value.indexOf(\"'\") === -1 ? singleQuotedString(value, ctx) : doubleQuotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep);\n }\n\n if (!implicitKey && !inFlow && type !== PlainValue.Type.PLAIN && value.indexOf('\\n') !== -1) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n\n if (indent === '' && containsDocumentMarker(value)) {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n\n const str = value.replace(/\\n+/g, `$&\\n${indent}`); // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),\n // and others in v1.1.\n\n if (actualString) {\n const {\n tags\n } = ctx.doc.schema;\n const resolved = resolveScalar(str, tags, tags.scalarFallback).value;\n if (typeof resolved !== 'string') return doubleQuotedString(value, ctx);\n }\n\n const body = implicitKey ? 
str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));\n\n if (comment && !inFlow && (body.indexOf('\\n') !== -1 || comment.indexOf('\\n') !== -1)) {\n if (onComment) onComment();\n return addCommentBefore(body, indent, comment);\n }\n\n return body;\n}\n\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const {\n defaultType\n } = strOptions;\n const {\n implicitKey,\n inFlow\n } = ctx;\n let {\n type,\n value\n } = item;\n\n if (typeof value !== 'string') {\n value = String(value);\n item = Object.assign({}, item, {\n value\n });\n }\n\n const _stringify = _type => {\n switch (_type) {\n case PlainValue.Type.BLOCK_FOLDED:\n case PlainValue.Type.BLOCK_LITERAL:\n return blockString(item, ctx, onComment, onChompKeep);\n\n case PlainValue.Type.QUOTE_DOUBLE:\n return doubleQuotedString(value, ctx);\n\n case PlainValue.Type.QUOTE_SINGLE:\n return singleQuotedString(value, ctx);\n\n case PlainValue.Type.PLAIN:\n return plainString(item, ctx, onComment, onChompKeep);\n\n default:\n return null;\n }\n };\n\n if (type !== PlainValue.Type.QUOTE_DOUBLE && /[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f]/.test(value)) {\n // force double quotes on control characters\n type = PlainValue.Type.QUOTE_DOUBLE;\n } else if ((implicitKey || inFlow) && (type === PlainValue.Type.BLOCK_FOLDED || type === PlainValue.Type.BLOCK_LITERAL)) {\n // should not happen; blocks are not valid inside flow containers\n type = PlainValue.Type.QUOTE_DOUBLE;\n }\n\n let res = _stringify(type);\n\n if (res === null) {\n res = _stringify(defaultType);\n if (res === null) throw new Error(`Unsupported default string type ${defaultType}`);\n }\n\n return res;\n}\n\nfunction stringifyNumber({\n format,\n minFractionDigits,\n tag,\n value\n}) {\n if (typeof value === 'bigint') return String(value);\n if (!isFinite(value)) return isNaN(value) ? '.nan' : value < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n\n if (!format && minFractionDigits && (!tag || tag === 'tag:yaml.org,2002:float') && /^\\d/.test(n)) {\n let i = n.indexOf('.');\n\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n\n let d = minFractionDigits - (n.length - i - 1);\n\n while (d-- > 0) n += '0';\n }\n\n return n;\n}\n\nfunction checkFlowCollectionEnd(errors, cst) {\n let char, name;\n\n switch (cst.type) {\n case PlainValue.Type.FLOW_MAP:\n char = '}';\n name = 'flow map';\n break;\n\n case PlainValue.Type.FLOW_SEQ:\n char = ']';\n name = 'flow sequence';\n break;\n\n default:\n errors.push(new PlainValue.YAMLSemanticError(cst, 'Not a flow collection!?'));\n return;\n }\n\n let lastItem;\n\n for (let i = cst.items.length - 1; i >= 0; --i) {\n const item = cst.items[i];\n\n if (!item || item.type !== PlainValue.Type.COMMENT) {\n lastItem = item;\n break;\n }\n }\n\n if (lastItem && lastItem.char !== char) {\n const msg = `Expected ${name} to end with ${char}`;\n let err;\n\n if (typeof lastItem.offset === 'number') {\n err = new PlainValue.YAMLSemanticError(cst, msg);\n err.offset = lastItem.offset + 1;\n } else {\n err = new PlainValue.YAMLSemanticError(lastItem, msg);\n if (lastItem.range && lastItem.range.end) err.offset = lastItem.range.end - lastItem.range.start;\n }\n\n errors.push(err);\n }\n}\nfunction checkFlowCommentSpace(errors, comment) {\n const prev = comment.context.src[comment.range.start - 1];\n\n if (prev !== '\\n' && prev !== '\\t' && prev !== ' ') {\n const msg = 'Comments must be separated from other tokens by white space characters';\n errors.push(new PlainValue.YAMLSemanticError(comment, msg));\n }\n}\nfunction getLongKeyError(source, key) {\n const sk = String(key);\n const k = sk.substr(0, 8) + '...' + sk.substr(-8);\n return new PlainValue.YAMLSemanticError(source, `The \"${k}\" key is too long`);\n}\nfunction resolveComments(collection, comments) {\n for (const {\n afterKey,\n before,\n comment\n } of comments) {\n let item = collection.items[before];\n\n if (!item) {\n if (comment !== undefined) {\n if (collection.comment) collection.comment += '\\n' + comment;else collection.comment = comment;\n }\n } else {\n if (afterKey && item.value) item = item.value;\n\n if (comment === undefined) {\n if (afterKey || !item.commentBefore) item.spaceBefore = true;\n } else {\n if (item.commentBefore) item.commentBefore += '\\n' + comment;else item.commentBefore = comment;\n }\n }\n }\n}\n\n// on error, will return { str: string, errors: Error[] }\nfunction resolveString(doc, node) {\n const res = node.strValue;\n if (!res) return '';\n if (typeof res === 'string') return res;\n res.errors.forEach(error => {\n if (!error.source) error.source = node;\n doc.errors.push(error);\n });\n return res.str;\n}\n\nfunction resolveTagHandle(doc, node) {\n const {\n handle,\n suffix\n } = node.tag;\n let prefix = doc.tagPrefixes.find(p => p.handle === handle);\n\n if (!prefix) {\n const dtp = doc.getDefaults().tagPrefixes;\n if (dtp) prefix = dtp.find(p => p.handle === handle);\n if (!prefix) throw new PlainValue.YAMLSemanticError(node, `The ${handle} tag handle is non-default and was not declared.`);\n }\n\n if (!suffix) throw new PlainValue.YAMLSemanticError(node, `The ${handle} tag has no suffix.`);\n\n if (handle === '!' 
&& (doc.version || doc.options.version) === '1.0') {\n if (suffix[0] === '^') {\n doc.warnings.push(new PlainValue.YAMLWarning(node, 'YAML 1.0 ^ tag expansion is not supported'));\n return suffix;\n }\n\n if (/[:/]/.test(suffix)) {\n // word/foo -> tag:word.yaml.org,2002:foo\n const vocab = suffix.match(/^([a-z0-9-]+)\\/(.*)/i);\n return vocab ? `tag:${vocab[1]}.yaml.org,2002:${vocab[2]}` : `tag:${suffix}`;\n }\n }\n\n return prefix.prefix + decodeURIComponent(suffix);\n}\n\nfunction resolveTagName(doc, node) {\n const {\n tag,\n type\n } = node;\n let nonSpecific = false;\n\n if (tag) {\n const {\n handle,\n suffix,\n verbatim\n } = tag;\n\n if (verbatim) {\n if (verbatim !== '!' && verbatim !== '!!') return verbatim;\n const msg = `Verbatim tags aren't resolved, so ${verbatim} is invalid.`;\n doc.errors.push(new PlainValue.YAMLSemanticError(node, msg));\n } else if (handle === '!' && !suffix) {\n nonSpecific = true;\n } else {\n try {\n return resolveTagHandle(doc, node);\n } catch (error) {\n doc.errors.push(error);\n }\n }\n }\n\n switch (type) {\n case PlainValue.Type.BLOCK_FOLDED:\n case PlainValue.Type.BLOCK_LITERAL:\n case PlainValue.Type.QUOTE_DOUBLE:\n case PlainValue.Type.QUOTE_SINGLE:\n return PlainValue.defaultTags.STR;\n\n case PlainValue.Type.FLOW_MAP:\n case PlainValue.Type.MAP:\n return PlainValue.defaultTags.MAP;\n\n case PlainValue.Type.FLOW_SEQ:\n case PlainValue.Type.SEQ:\n return PlainValue.defaultTags.SEQ;\n\n case PlainValue.Type.PLAIN:\n return nonSpecific ? PlainValue.defaultTags.STR : null;\n\n default:\n return null;\n }\n}\n\nfunction resolveByTagName(doc, node, tagName) {\n const {\n tags\n } = doc.schema;\n const matchWithTest = [];\n\n for (const tag of tags) {\n if (tag.tag === tagName) {\n if (tag.test) matchWithTest.push(tag);else {\n const res = tag.resolve(doc, node);\n return res instanceof Collection ? 
res : new Scalar(res);\n }\n }\n }\n\n const str = resolveString(doc, node);\n if (typeof str === 'string' && matchWithTest.length > 0) return resolveScalar(str, matchWithTest, tags.scalarFallback);\n return null;\n}\n\nfunction getFallbackTagName({\n type\n}) {\n switch (type) {\n case PlainValue.Type.FLOW_MAP:\n case PlainValue.Type.MAP:\n return PlainValue.defaultTags.MAP;\n\n case PlainValue.Type.FLOW_SEQ:\n case PlainValue.Type.SEQ:\n return PlainValue.defaultTags.SEQ;\n\n default:\n return PlainValue.defaultTags.STR;\n }\n}\n\nfunction resolveTag(doc, node, tagName) {\n try {\n const res = resolveByTagName(doc, node, tagName);\n\n if (res) {\n if (tagName && node.tag) res.tag = tagName;\n return res;\n }\n } catch (error) {\n /* istanbul ignore if */\n if (!error.source) error.source = node;\n doc.errors.push(error);\n return null;\n }\n\n try {\n const fallback = getFallbackTagName(node);\n if (!fallback) throw new Error(`The tag ${tagName} is unavailable`);\n const msg = `The tag ${tagName} is unavailable, falling back to ${fallback}`;\n doc.warnings.push(new PlainValue.YAMLWarning(node, msg));\n const res = resolveByTagName(doc, node, fallback);\n res.tag = tagName;\n return res;\n } catch (error) {\n const refError = new PlainValue.YAMLReferenceError(node, error.message);\n refError.stack = error.stack;\n doc.errors.push(refError);\n return null;\n }\n}\n\nconst isCollectionItem = node => {\n if (!node) return false;\n const {\n type\n } = node;\n return type === PlainValue.Type.MAP_KEY || type === PlainValue.Type.MAP_VALUE || type === PlainValue.Type.SEQ_ITEM;\n};\n\nfunction resolveNodeProps(errors, node) {\n const comments = {\n before: [],\n after: []\n };\n let hasAnchor = false;\n let hasTag = false;\n const props = isCollectionItem(node.context.parent) ? node.context.parent.props.concat(node.props) : node.props;\n\n for (const {\n start,\n end\n } of props) {\n switch (node.context.src[start]) {\n case PlainValue.Char.COMMENT:\n {\n if (!node.commentHasRequiredWhitespace(start)) {\n const msg = 'Comments must be separated from other tokens by white space characters';\n errors.push(new PlainValue.YAMLSemanticError(node, msg));\n }\n\n const {\n header,\n valueRange\n } = node;\n const cc = valueRange && (start > valueRange.start || header && start > header.start) ? 
comments.after : comments.before;\n cc.push(node.context.src.slice(start + 1, end));\n break;\n }\n // Actual anchor & tag resolution is handled by schema, here we just complain\n\n case PlainValue.Char.ANCHOR:\n if (hasAnchor) {\n const msg = 'A node can have at most one anchor';\n errors.push(new PlainValue.YAMLSemanticError(node, msg));\n }\n\n hasAnchor = true;\n break;\n\n case PlainValue.Char.TAG:\n if (hasTag) {\n const msg = 'A node can have at most one tag';\n errors.push(new PlainValue.YAMLSemanticError(node, msg));\n }\n\n hasTag = true;\n break;\n }\n }\n\n return {\n comments,\n hasAnchor,\n hasTag\n };\n}\n\nfunction resolveNodeValue(doc, node) {\n const {\n anchors,\n errors,\n schema\n } = doc;\n\n if (node.type === PlainValue.Type.ALIAS) {\n const name = node.rawValue;\n const src = anchors.getNode(name);\n\n if (!src) {\n const msg = `Aliased anchor not found: ${name}`;\n errors.push(new PlainValue.YAMLReferenceError(node, msg));\n return null;\n } // Lazy resolution for circular references\n\n\n const res = new Alias(src);\n\n anchors._cstAliases.push(res);\n\n return res;\n }\n\n const tagName = resolveTagName(doc, node);\n if (tagName) return resolveTag(doc, node, tagName);\n\n if (node.type !== PlainValue.Type.PLAIN) {\n const msg = `Failed to resolve ${node.type} node here`;\n errors.push(new PlainValue.YAMLSyntaxError(node, msg));\n return null;\n }\n\n try {\n const str = resolveString(doc, node);\n return resolveScalar(str, schema.tags, schema.tags.scalarFallback);\n } catch (error) {\n if (!error.source) error.source = node;\n errors.push(error);\n return null;\n }\n} // sets node.resolved on success\n\n\nfunction resolveNode(doc, node) {\n if (!node) return null;\n if (node.error) doc.errors.push(node.error);\n const {\n comments,\n hasAnchor,\n hasTag\n } = resolveNodeProps(doc.errors, node);\n\n if (hasAnchor) {\n const {\n anchors\n } = doc;\n const name = node.anchor;\n const prev = anchors.getNode(name); // At this point, aliases for any preceding node with the same anchor\n // name have already been resolved, so it may safely be renamed.\n\n if (prev) anchors.map[anchors.newName(name)] = prev; // During parsing, we need to store the CST node in anchors.map as\n // anchors need to be available during resolution to allow for\n // circular references.\n\n anchors.map[name] = node;\n }\n\n if (node.type === PlainValue.Type.ALIAS && (hasAnchor || hasTag)) {\n const msg = 'An alias node must not specify any properties';\n doc.errors.push(new PlainValue.YAMLSemanticError(node, msg));\n }\n\n const res = resolveNodeValue(doc, node);\n\n if (res) {\n res.range = [node.range.start, node.range.end];\n if (doc.options.keepCstNodes) res.cstNode = node;\n if (doc.options.keepNodeTypes) res.type = node.type;\n const cb = comments.before.join('\\n');\n\n if (cb) {\n res.commentBefore = res.commentBefore ? `${res.commentBefore}\\n${cb}` : cb;\n }\n\n const ca = comments.after.join('\\n');\n if (ca) res.comment = res.comment ? `${res.comment}\\n${ca}` : ca;\n }\n\n return node.resolved = res;\n}\n\nfunction resolveMap(doc, cst) {\n if (cst.type !== PlainValue.Type.MAP && cst.type !== PlainValue.Type.FLOW_MAP) {\n const msg = `A ${cst.type} node cannot be resolved as a mapping`;\n doc.errors.push(new PlainValue.YAMLSyntaxError(cst, msg));\n return null;\n }\n\n const {\n comments,\n items\n } = cst.type === PlainValue.Type.FLOW_MAP ? 
resolveFlowMapItems(doc, cst) : resolveBlockMapItems(doc, cst);\n const map = new YAMLMap();\n map.items = items;\n resolveComments(map, comments);\n let hasCollectionKey = false;\n\n for (let i = 0; i < items.length; ++i) {\n const {\n key: iKey\n } = items[i];\n if (iKey instanceof Collection) hasCollectionKey = true;\n\n if (doc.schema.merge && iKey && iKey.value === MERGE_KEY) {\n items[i] = new Merge(items[i]);\n const sources = items[i].value.items;\n let error = null;\n sources.some(node => {\n if (node instanceof Alias) {\n // During parsing, alias sources are CST nodes; to account for\n // circular references their resolved values can't be used here.\n const {\n type\n } = node.source;\n if (type === PlainValue.Type.MAP || type === PlainValue.Type.FLOW_MAP) return false;\n return error = 'Merge nodes aliases can only point to maps';\n }\n\n return error = 'Merge nodes can only have Alias nodes as values';\n });\n if (error) doc.errors.push(new PlainValue.YAMLSemanticError(cst, error));\n } else {\n for (let j = i + 1; j < items.length; ++j) {\n const {\n key: jKey\n } = items[j];\n\n if (iKey === jKey || iKey && jKey && Object.prototype.hasOwnProperty.call(iKey, 'value') && iKey.value === jKey.value) {\n const msg = `Map keys must be unique; \"${iKey}\" is repeated`;\n doc.errors.push(new PlainValue.YAMLSemanticError(cst, msg));\n break;\n }\n }\n }\n }\n\n if (hasCollectionKey && !doc.options.mapAsMap) {\n const warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. Use mapAsMap: true to avoid this.';\n doc.warnings.push(new PlainValue.YAMLWarning(cst, warn));\n }\n\n cst.resolved = map;\n return map;\n}\n\nconst valueHasPairComment = ({\n context: {\n lineStart,\n node,\n src\n },\n props\n}) => {\n if (props.length === 0) return false;\n const {\n start\n } = props[0];\n if (node && start > node.valueRange.start) return false;\n if (src[start] !== PlainValue.Char.COMMENT) return false;\n\n for (let i = lineStart; i < start; ++i) if (src[i] === '\\n') return false;\n\n return true;\n};\n\nfunction resolvePairComment(item, pair) {\n if (!valueHasPairComment(item)) return;\n const comment = item.getPropValue(0, PlainValue.Char.COMMENT, true);\n let found = false;\n const cb = pair.value.commentBefore;\n\n if (cb && cb.startsWith(comment)) {\n pair.value.commentBefore = cb.substr(comment.length + 1);\n found = true;\n } else {\n const cc = pair.value.comment;\n\n if (!item.node && cc && cc.startsWith(comment)) {\n pair.value.comment = cc.substr(comment.length + 1);\n found = true;\n }\n }\n\n if (found) pair.comment = comment;\n}\n\nfunction resolveBlockMapItems(doc, cst) {\n const comments = [];\n const items = [];\n let key = undefined;\n let keyStart = null;\n\n for (let i = 0; i < cst.items.length; ++i) {\n const item = cst.items[i];\n\n switch (item.type) {\n case PlainValue.Type.BLANK_LINE:\n comments.push({\n afterKey: !!key,\n before: items.length\n });\n break;\n\n case PlainValue.Type.COMMENT:\n comments.push({\n afterKey: !!key,\n before: items.length,\n comment: item.comment\n });\n break;\n\n case PlainValue.Type.MAP_KEY:\n if (key !== undefined) items.push(new Pair(key));\n if (item.error) doc.errors.push(item.error);\n key = resolveNode(doc, item.node);\n keyStart = null;\n break;\n\n case PlainValue.Type.MAP_VALUE:\n {\n if (key === undefined) key = null;\n if (item.error) doc.errors.push(item.error);\n\n if (!item.context.atLineStart && item.node && item.node.type === PlainValue.Type.MAP && !item.node.context.atLineStart) {\n 
const msg = 'Nested mappings are not allowed in compact mappings';\n doc.errors.push(new PlainValue.YAMLSemanticError(item.node, msg));\n }\n\n let valueNode = item.node;\n\n if (!valueNode && item.props.length > 0) {\n // Comments on an empty mapping value need to be preserved, so we\n // need to construct a minimal empty node here to use instead of the\n // missing `item.node`. -- eemeli/yaml#19\n valueNode = new PlainValue.PlainValue(PlainValue.Type.PLAIN, []);\n valueNode.context = {\n parent: item,\n src: item.context.src\n };\n const pos = item.range.start + 1;\n valueNode.range = {\n start: pos,\n end: pos\n };\n valueNode.valueRange = {\n start: pos,\n end: pos\n };\n\n if (typeof item.range.origStart === 'number') {\n const origPos = item.range.origStart + 1;\n valueNode.range.origStart = valueNode.range.origEnd = origPos;\n valueNode.valueRange.origStart = valueNode.valueRange.origEnd = origPos;\n }\n }\n\n const pair = new Pair(key, resolveNode(doc, valueNode));\n resolvePairComment(item, pair);\n items.push(pair);\n\n if (key && typeof keyStart === 'number') {\n if (item.range.start > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key));\n }\n\n key = undefined;\n keyStart = null;\n }\n break;\n\n default:\n if (key !== undefined) items.push(new Pair(key));\n key = resolveNode(doc, item);\n keyStart = item.range.start;\n if (item.error) doc.errors.push(item.error);\n\n next: for (let j = i + 1;; ++j) {\n const nextItem = cst.items[j];\n\n switch (nextItem && nextItem.type) {\n case PlainValue.Type.BLANK_LINE:\n case PlainValue.Type.COMMENT:\n continue next;\n\n case PlainValue.Type.MAP_VALUE:\n break next;\n\n default:\n {\n const msg = 'Implicit map keys need to be followed by map values';\n doc.errors.push(new PlainValue.YAMLSemanticError(item, msg));\n break next;\n }\n }\n }\n\n if (item.valueRangeContainsNewline) {\n const msg = 'Implicit map keys need to be on a single line';\n doc.errors.push(new PlainValue.YAMLSemanticError(item, msg));\n }\n\n }\n }\n\n if (key !== undefined) items.push(new Pair(key));\n return {\n comments,\n items\n };\n}\n\nfunction resolveFlowMapItems(doc, cst) {\n const comments = [];\n const items = [];\n let key = undefined;\n let explicitKey = false;\n let next = '{';\n\n for (let i = 0; i < cst.items.length; ++i) {\n const item = cst.items[i];\n\n if (typeof item.char === 'string') {\n const {\n char,\n offset\n } = item;\n\n if (char === '?' 
&& key === undefined && !explicitKey) {\n explicitKey = true;\n next = ':';\n continue;\n }\n\n if (char === ':') {\n if (key === undefined) key = null;\n\n if (next === ':') {\n next = ',';\n continue;\n }\n } else {\n if (explicitKey) {\n if (key === undefined && char !== ',') key = null;\n explicitKey = false;\n }\n\n if (key !== undefined) {\n items.push(new Pair(key));\n key = undefined;\n\n if (char === ',') {\n next = ':';\n continue;\n }\n }\n }\n\n if (char === '}') {\n if (i === cst.items.length - 1) continue;\n } else if (char === next) {\n next = ':';\n continue;\n }\n\n const msg = `Flow map contains an unexpected ${char}`;\n const err = new PlainValue.YAMLSyntaxError(cst, msg);\n err.offset = offset;\n doc.errors.push(err);\n } else if (item.type === PlainValue.Type.BLANK_LINE) {\n comments.push({\n afterKey: !!key,\n before: items.length\n });\n } else if (item.type === PlainValue.Type.COMMENT) {\n checkFlowCommentSpace(doc.errors, item);\n comments.push({\n afterKey: !!key,\n before: items.length,\n comment: item.comment\n });\n } else if (key === undefined) {\n if (next === ',') doc.errors.push(new PlainValue.YAMLSemanticError(item, 'Separator , missing in flow map'));\n key = resolveNode(doc, item);\n } else {\n if (next !== ',') doc.errors.push(new PlainValue.YAMLSemanticError(item, 'Indicator : missing in flow map entry'));\n items.push(new Pair(key, resolveNode(doc, item)));\n key = undefined;\n explicitKey = false;\n }\n }\n\n checkFlowCollectionEnd(doc.errors, cst);\n if (key !== undefined) items.push(new Pair(key));\n return {\n comments,\n items\n };\n}\n\nfunction resolveSeq(doc, cst) {\n if (cst.type !== PlainValue.Type.SEQ && cst.type !== PlainValue.Type.FLOW_SEQ) {\n const msg = `A ${cst.type} node cannot be resolved as a sequence`;\n doc.errors.push(new PlainValue.YAMLSyntaxError(cst, msg));\n return null;\n }\n\n const {\n comments,\n items\n } = cst.type === PlainValue.Type.FLOW_SEQ ? resolveFlowSeqItems(doc, cst) : resolveBlockSeqItems(doc, cst);\n const seq = new YAMLSeq();\n seq.items = items;\n resolveComments(seq, comments);\n\n if (!doc.options.mapAsMap && items.some(it => it instanceof Pair && it.key instanceof Collection)) {\n const warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. 
Use mapAsMap: true to avoid this.';\n doc.warnings.push(new PlainValue.YAMLWarning(cst, warn));\n }\n\n cst.resolved = seq;\n return seq;\n}\n\nfunction resolveBlockSeqItems(doc, cst) {\n const comments = [];\n const items = [];\n\n for (let i = 0; i < cst.items.length; ++i) {\n const item = cst.items[i];\n\n switch (item.type) {\n case PlainValue.Type.BLANK_LINE:\n comments.push({\n before: items.length\n });\n break;\n\n case PlainValue.Type.COMMENT:\n comments.push({\n comment: item.comment,\n before: items.length\n });\n break;\n\n case PlainValue.Type.SEQ_ITEM:\n if (item.error) doc.errors.push(item.error);\n items.push(resolveNode(doc, item.node));\n\n if (item.hasProps) {\n const msg = 'Sequence items cannot have tags or anchors before the - indicator';\n doc.errors.push(new PlainValue.YAMLSemanticError(item, msg));\n }\n\n break;\n\n default:\n if (item.error) doc.errors.push(item.error);\n doc.errors.push(new PlainValue.YAMLSyntaxError(item, `Unexpected ${item.type} node in sequence`));\n }\n }\n\n return {\n comments,\n items\n };\n}\n\nfunction resolveFlowSeqItems(doc, cst) {\n const comments = [];\n const items = [];\n let explicitKey = false;\n let key = undefined;\n let keyStart = null;\n let next = '[';\n let prevItem = null;\n\n for (let i = 0; i < cst.items.length; ++i) {\n const item = cst.items[i];\n\n if (typeof item.char === 'string') {\n const {\n char,\n offset\n } = item;\n\n if (char !== ':' && (explicitKey || key !== undefined)) {\n if (explicitKey && key === undefined) key = next ? items.pop() : null;\n items.push(new Pair(key));\n explicitKey = false;\n key = undefined;\n keyStart = null;\n }\n\n if (char === next) {\n next = null;\n } else if (!next && char === '?') {\n explicitKey = true;\n } else if (next !== '[' && char === ':' && key === undefined) {\n if (next === ',') {\n key = items.pop();\n\n if (key instanceof Pair) {\n const msg = 'Chaining flow sequence pairs is invalid';\n const err = new PlainValue.YAMLSemanticError(cst, msg);\n err.offset = offset;\n doc.errors.push(err);\n }\n\n if (!explicitKey && typeof keyStart === 'number') {\n const keyEnd = item.range ? 
item.range.start : item.offset;\n if (keyEnd > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key));\n const {\n src\n } = prevItem.context;\n\n for (let i = keyStart; i < keyEnd; ++i) if (src[i] === '\\n') {\n const msg = 'Implicit keys of flow sequence pairs need to be on a single line';\n doc.errors.push(new PlainValue.YAMLSemanticError(prevItem, msg));\n break;\n }\n }\n } else {\n key = null;\n }\n\n keyStart = null;\n explicitKey = false;\n next = null;\n } else if (next === '[' || char !== ']' || i < cst.items.length - 1) {\n const msg = `Flow sequence contains an unexpected ${char}`;\n const err = new PlainValue.YAMLSyntaxError(cst, msg);\n err.offset = offset;\n doc.errors.push(err);\n }\n } else if (item.type === PlainValue.Type.BLANK_LINE) {\n comments.push({\n before: items.length\n });\n } else if (item.type === PlainValue.Type.COMMENT) {\n checkFlowCommentSpace(doc.errors, item);\n comments.push({\n comment: item.comment,\n before: items.length\n });\n } else {\n if (next) {\n const msg = `Expected a ${next} in flow sequence`;\n doc.errors.push(new PlainValue.YAMLSemanticError(item, msg));\n }\n\n const value = resolveNode(doc, item);\n\n if (key === undefined) {\n items.push(value);\n prevItem = item;\n } else {\n items.push(new Pair(key, value));\n key = undefined;\n }\n\n keyStart = item.range.start;\n next = ',';\n }\n }\n\n checkFlowCollectionEnd(doc.errors, cst);\n if (key !== undefined) items.push(new Pair(key));\n return {\n comments,\n items\n };\n}\n\nexports.Alias = Alias;\nexports.Collection = Collection;\nexports.Merge = Merge;\nexports.Node = Node;\nexports.Pair = Pair;\nexports.Scalar = Scalar;\nexports.YAMLMap = YAMLMap;\nexports.YAMLSeq = YAMLSeq;\nexports.addComment = addComment;\nexports.binaryOptions = binaryOptions;\nexports.boolOptions = boolOptions;\nexports.findPair = findPair;\nexports.intOptions = intOptions;\nexports.isEmptyPath = isEmptyPath;\nexports.nullOptions = nullOptions;\nexports.resolveMap = resolveMap;\nexports.resolveNode = resolveNode;\nexports.resolveSeq = resolveSeq;\nexports.resolveString = resolveString;\nexports.strOptions = strOptions;\nexports.stringifyNumber = stringifyNumber;\nexports.stringifyString = stringifyString;\nexports.toJSON = toJSON;\n","'use strict';\n\nvar PlainValue = require('./PlainValue-ec8e588e.js');\nvar resolveSeq = require('./resolveSeq-4a68b39b.js');\n\n/* global atob, btoa, Buffer */\nconst binary = {\n identify: value => value instanceof Uint8Array,\n // Buffer inherits from Uint8Array\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = URL.createObjectURL(blob)\n */\n resolve: (doc, node) => {\n const src = resolveSeq.resolveString(doc, node);\n\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n } else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n\n for (let i = 0; i < str.length; ++i) buffer[i] = str.charCodeAt(i);\n\n return buffer;\n } else {\n const msg = 'This environment does not support reading binary tags; either Buffer or atob is required';\n doc.errors.push(new PlainValue.YAMLReferenceError(node, msg));\n return null;\n }\n },\n options: resolveSeq.binaryOptions,\n stringify: ({\n 
comment,\n type,\n value\n }, ctx, onComment, onChompKeep) => {\n let src;\n\n if (typeof Buffer === 'function') {\n src = value instanceof Buffer ? value.toString('base64') : Buffer.from(value.buffer).toString('base64');\n } else if (typeof btoa === 'function') {\n let s = '';\n\n for (let i = 0; i < value.length; ++i) s += String.fromCharCode(value[i]);\n\n src = btoa(s);\n } else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n\n if (!type) type = resolveSeq.binaryOptions.defaultType;\n\n if (type === PlainValue.Type.QUOTE_DOUBLE) {\n value = src;\n } else {\n const {\n lineWidth\n } = resolveSeq.binaryOptions;\n const n = Math.ceil(src.length / lineWidth);\n const lines = new Array(n);\n\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = src.substr(o, lineWidth);\n }\n\n value = lines.join(type === PlainValue.Type.BLOCK_LITERAL ? '\\n' : ' ');\n }\n\n return resolveSeq.stringifyString({\n comment,\n type,\n value\n }, ctx, onComment, onChompKeep);\n }\n};\n\nfunction parsePairs(doc, cst) {\n const seq = resolveSeq.resolveSeq(doc, cst);\n\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (item instanceof resolveSeq.Pair) continue;else if (item instanceof resolveSeq.YAMLMap) {\n if (item.items.length > 1) {\n const msg = 'Each pair must have its own sequence indicator';\n throw new PlainValue.YAMLSemanticError(cst, msg);\n }\n\n const pair = item.items[0] || new resolveSeq.Pair();\n if (item.commentBefore) pair.commentBefore = pair.commentBefore ? `${item.commentBefore}\\n${pair.commentBefore}` : item.commentBefore;\n if (item.comment) pair.comment = pair.comment ? `${item.comment}\\n${pair.comment}` : item.comment;\n item = pair;\n }\n seq.items[i] = item instanceof resolveSeq.Pair ? 
item : new resolveSeq.Pair(item);\n }\n\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const pairs = new resolveSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n\n for (const it of iterable) {\n let key, value;\n\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n } else throw new TypeError(`Expected [key, value] tuple: ${it}`);\n } else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n } else throw new TypeError(`Expected { key: value } tuple: ${it}`);\n } else {\n key = it;\n }\n\n const pair = schema.createPair(key, value, ctx);\n pairs.items.push(pair);\n }\n\n return pairs;\n}\nconst pairs = {\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: parsePairs,\n createNode: createPairs\n};\n\nclass YAMLOMap extends resolveSeq.YAMLSeq {\n constructor() {\n super();\n\n PlainValue._defineProperty(this, \"add\", resolveSeq.YAMLMap.prototype.add.bind(this));\n\n PlainValue._defineProperty(this, \"delete\", resolveSeq.YAMLMap.prototype.delete.bind(this));\n\n PlainValue._defineProperty(this, \"get\", resolveSeq.YAMLMap.prototype.get.bind(this));\n\n PlainValue._defineProperty(this, \"has\", resolveSeq.YAMLMap.prototype.has.bind(this));\n\n PlainValue._defineProperty(this, \"set\", resolveSeq.YAMLMap.prototype.set.bind(this));\n\n this.tag = YAMLOMap.tag;\n }\n\n toJSON(_, ctx) {\n const map = new Map();\n if (ctx && ctx.onCreate) ctx.onCreate(map);\n\n for (const pair of this.items) {\n let key, value;\n\n if (pair instanceof resolveSeq.Pair) {\n key = resolveSeq.toJSON(pair.key, '', ctx);\n value = resolveSeq.toJSON(pair.value, key, ctx);\n } else {\n key = resolveSeq.toJSON(pair, '', ctx);\n }\n\n if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n\n return map;\n }\n\n}\n\nPlainValue._defineProperty(YAMLOMap, \"tag\", 'tag:yaml.org,2002:omap');\n\nfunction parseOMap(doc, cst) {\n const pairs = parsePairs(doc, cst);\n const seenKeys = [];\n\n for (const {\n key\n } of pairs.items) {\n if (key instanceof resolveSeq.Scalar) {\n if (seenKeys.includes(key.value)) {\n const msg = 'Ordered maps must not include duplicate keys';\n throw new PlainValue.YAMLSemanticError(cst, msg);\n } else {\n seenKeys.push(key.value);\n }\n }\n }\n\n return Object.assign(new YAMLOMap(), pairs);\n}\n\nfunction createOMap(schema, iterable, ctx) {\n const pairs = createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs.items;\n return omap;\n}\n\nconst omap = {\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve: parseOMap,\n createNode: createOMap\n};\n\nclass YAMLSet extends resolveSeq.YAMLMap {\n constructor() {\n super();\n this.tag = YAMLSet.tag;\n }\n\n add(key) {\n const pair = key instanceof resolveSeq.Pair ? key : new resolveSeq.Pair(key);\n const prev = resolveSeq.findPair(this.items, pair.key);\n if (!prev) this.items.push(pair);\n }\n\n get(key, keepPair) {\n const pair = resolveSeq.findPair(this.items, key);\n return !keepPair && pair instanceof resolveSeq.Pair ? pair.key instanceof resolveSeq.Scalar ? 
pair.key.value : pair.key : pair;\n }\n\n set(key, value) {\n if (typeof value !== 'boolean') throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = resolveSeq.findPair(this.items, key);\n\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n } else if (!prev && value) {\n this.items.push(new resolveSeq.Pair(key));\n }\n }\n\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n\n toString(ctx, onComment, onChompKeep) {\n if (!ctx) return JSON.stringify(this);\n if (this.hasAllNullValues()) return super.toString(ctx, onComment, onChompKeep);else throw new Error('Set items must all have null values');\n }\n\n}\n\nPlainValue._defineProperty(YAMLSet, \"tag\", 'tag:yaml.org,2002:set');\n\nfunction parseSet(doc, cst) {\n const map = resolveSeq.resolveMap(doc, cst);\n if (!map.hasAllNullValues()) throw new PlainValue.YAMLSemanticError(cst, 'Set items must all have null values');\n return Object.assign(new YAMLSet(), map);\n}\n\nfunction createSet(schema, iterable, ctx) {\n const set = new YAMLSet();\n\n for (const value of iterable) set.items.push(schema.createPair(value, null, ctx));\n\n return set;\n}\n\nconst set = {\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve: parseSet,\n createNode: createSet\n};\n\nconst parseSexagesimal = (sign, parts) => {\n const n = parts.split(':').reduce((n, p) => n * 60 + Number(p), 0);\n return sign === '-' ? -n : n;\n}; // hhhh:mm:ss.sss\n\n\nconst stringifySexagesimal = ({\n value\n}) => {\n if (isNaN(value) || !isFinite(value)) return resolveSeq.stringifyNumber(value);\n let sign = '';\n\n if (value < 0) {\n sign = '-';\n value = Math.abs(value);\n }\n\n const parts = [value % 60]; // seconds, including ms\n\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n } else {\n value = Math.round((value - parts[0]) / 60);\n parts.unshift(value % 60); // minutes\n\n if (value >= 60) {\n value = Math.round((value - parts[0]) / 60);\n parts.unshift(value); // hours\n }\n }\n\n return sign + parts.map(n => n < 10 ? '0' + String(n) : String(n)).join(':').replace(/000000\\d*$/, '') // % 60 may introduce error\n ;\n};\n\nconst intTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+)$/,\n resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*)$/,\n resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^(?:' + '([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?' 
+ ')$'),\n resolve: (str, year, month, day, hour, minute, second, millisec, tz) => {\n if (millisec) millisec = (millisec + '00').substr(1, 3);\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec || 0);\n\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz[0], tz.slice(1));\n if (Math.abs(d) < 30) d *= 60;\n date -= 60000 * d;\n }\n\n return new Date(date);\n },\n stringify: ({\n value\n }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\n/* global console, process, YAML_SILENCE_DEPRECATION_WARNINGS, YAML_SILENCE_WARNINGS */\nfunction shouldWarn(deprecation) {\n const env = typeof process !== 'undefined' && process.env || {};\n\n if (deprecation) {\n if (typeof YAML_SILENCE_DEPRECATION_WARNINGS !== 'undefined') return !YAML_SILENCE_DEPRECATION_WARNINGS;\n return !env.YAML_SILENCE_DEPRECATION_WARNINGS;\n }\n\n if (typeof YAML_SILENCE_WARNINGS !== 'undefined') return !YAML_SILENCE_WARNINGS;\n return !env.YAML_SILENCE_WARNINGS;\n}\n\nfunction warn(warning, type) {\n if (shouldWarn(false)) {\n const emit = typeof process !== 'undefined' && process.emitWarning; // This will throw in Jest if `warning` is an Error instance due to\n // https://github.com/facebook/jest/issues/2549\n\n if (emit) emit(warning, type);else {\n // eslint-disable-next-line no-console\n console.warn(type ? `${type}: ${warning}` : warning);\n }\n }\n}\nfunction warnFileDeprecation(filename) {\n if (shouldWarn(true)) {\n const path = filename.replace(/.*yaml[/\\\\]/i, '').replace(/\\.js$/, '').replace(/\\\\/g, '/');\n warn(`The endpoint 'yaml/${path}' will be removed in a future release.`, 'DeprecationWarning');\n }\n}\nconst warned = {};\nfunction warnOptionDeprecation(name, alternative) {\n if (!warned[name] && shouldWarn(true)) {\n warned[name] = true;\n let msg = `The option '${name}' will be removed in a future release`;\n msg += alternative ? 
`, use '${alternative}' instead.` : '.';\n warn(msg, 'DeprecationWarning');\n }\n}\n\nexports.binary = binary;\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.omap = omap;\nexports.pairs = pairs;\nexports.set = set;\nexports.timestamp = timestamp;\nexports.warn = warn;\nexports.warnFileDeprecation = warnFileDeprecation;\nexports.warnOptionDeprecation = warnOptionDeprecation;\n","module.exports = require('./dist').YAML\n","module.exports = require(\"assert\");;","module.exports = require(\"child_process\");;","module.exports = require(\"crypto\");;","module.exports = require(\"events\");;","module.exports = require(\"fs\");;","module.exports = require(\"http\");;","module.exports = require(\"https\");;","module.exports = require(\"net\");;","module.exports = require(\"os\");;","module.exports = require(\"path\");;","module.exports = require(\"stream\");;","module.exports = require(\"tls\");;","module.exports = require(\"util\");;","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tif(__webpack_module_cache__[moduleId]) {\n\t\treturn __webpack_module_cache__[moduleId].exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\n__webpack_require__.ab = __dirname + \"/\";","// module exports must be returned from runtime so entry inlining is disabled\n// startup\n// Load entry module and return exports\nreturn 
__webpack_require__(109);\n"],"mappings":";;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACpGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC5HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACxEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AChGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC9OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACxlBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACxhBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AClSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACtEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;AC9lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;AC7jDA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;AC9BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;AC/MA;AACA;AACA;A;;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACzQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACtvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC72BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AC3gBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACptDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;;ACpkEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A;;;;;ACjaA;AACA;AACA;A;;;;;;ACFA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA
;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;;;ACDA;AACA;A;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC5BA;AACA;ACDA;AACA;AACA;AACA;;A","sourceRoot":""} \ No newline at end of file diff --git a/dist/licenses.txt b/dist/licenses.txt new file mode 100644 index 0000000..96481a6 --- /dev/null +++ b/dist/licenses.txt @@ -0,0 +1,158 @@ +@actions/core +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/exec +MIT + +@actions/http-client +MIT +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@actions/io +MIT + +@actions/tool-cache +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +ini +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +semver +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +tunnel +MIT +The MIT License (MIT) + +Copyright (c) 2012 Koichi Kobayashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +uuid +MIT +The MIT License (MIT) + +Copyright (c) 2010-2016 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +yaml +ISC +Copyright 2018 Eemeli Aro + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. 
diff --git a/dist/sourcemap-register.js b/dist/sourcemap-register.js new file mode 100644 index 0000000..e6b6e66 --- /dev/null +++ b/dist/sourcemap-register.js @@ -0,0 +1 @@ +module.exports=(()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},645:(e,r,n)=>{n(284).install()},284:(e,r,n)=>{var t=n(596).SourceMapConsumer;var o=n(622);var i;try{i=n(747);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var u=n(650);var s=false;var a=false;var l=false;var c="auto";var f={};var p={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var u=true;var s=this.isConstructor();var a=!(this.isToplevel()||s);if(a){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(s){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;u=false}if(u){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach(function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]});r.toString=CallSiteToString;return r}function wrapCallSite(e){if(e.isNative()){return e}var r=e.getFileName()||e.getScriptNameOrSourceURL();if(r){var n=e.getLineNumber();var t=e.getColumnNumber()-1;var o=62;if(n===1&&t>o&&!isInBrowser()&&!e.isEval()){t-=o}var i=mapSourcePosition({source:r,line:n,column:t});e=cloneCallSite(e);var u=e.getFunctionName;e.getFunctionName=function(){return i.name||u()};e.getFileName=function(){return i.source};e.getLineNumber=function(){return i.line};e.getColumnNumber=function(){return i.column+1};e.getScriptNameOrSourceURL=function(){return i.source};return e}var s=e.isEval()&&e.getEvalOrigin();if(s){s=mapEvalOrigin(s);e=cloneCallSite(e);e.getEvalOrigin=function(){return s};return e}return e}function prepareStackTrace(e,r){if(l){f={};p={}}return e+r.map(function(e){return"\n at "+wrapCallSite(e)}).join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var 
o=+r[3];var u=f[n];if(!u&&i&&i.existsSync(n)){try{u=i.readFileSync(n,"utf8")}catch(e){u=""}}if(u){var s=u.split(/(?:\r\n|\r|\n)/)[t-1];if(s){return n+":"+t+"\n"+s+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);if(process.stderr._handle&&process.stderr._handle.setBlocking){process.stderr._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);process.exit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var m=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=_;r.install=function(e){e=e||{};if(e.environment){c=e.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. Available options are {auto, browser, node}")}}if(e.retrieveFile){if(e.overrideRetrieveFile){h.length=0}h.unshift(e.retrieveFile)}if(e.retrieveSourceMap){if(e.overrideRetrieveSourceMap){d.length=0}d.unshift(e.retrieveSourceMap)}if(e.hookRequire&&!isInBrowser()){var r;try{r=n(282)}catch(e){}var t=r.prototype._compile;if(!t.__sourceMapSupport){r.prototype._compile=function(e,r){f[r]=e;p[r]=undefined;return t.call(this,e,r)};r.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in e?e.emptyCacheBetweenOperations:false}if(!s){s=true;Error.prepareStackTrace=prepareStackTrace}if(!a){var o="handleUncaughtExceptions"in e?e.handleUncaughtExceptions:true;if(o&&hasGlobalProcessEventEmitter()){a=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=m.slice(0)}},837:(e,r,n)=>{var t=n(983);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(537);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&u;i>>>=o;if(i>0){n|=s}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var a=0;var l=0;var c,f;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}f=t.decode(e.charCodeAt(r++));if(f===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(f&s);f&=u;a=a+(f<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,u){var s=Math.floor((n-e)/2)+e;var a=i(t,o[s],true);if(a===0){return s}else if(a>0){if(n-s>1){return recursiveSearch(s,n,t,o,i,u)}if(u==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,s,t,o,i,u)}if(u==r.LEAST_UPPER_BOUND){return s}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},740:(e,r,n)=>{var t=n(983);function generatedPositionAfter(e,r){var n=e.generatedLine;var 
o=r.generatedLine;var i=e.generatedColumn;var u=r.generatedColumn;return o>n||o==n&&u>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},226:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(983);var i=n(164);var u=n(837).I;var s=n(215);var a=n(226).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var u;switch(i){case SourceMapConsumer.GENERATED_ORDER:u=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:u=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var s=this.sourceRoot;u.map(function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(s,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}},this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var u=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(u>=0){var s=this._originalMappings[u];if(e.column===undefined){var 
a=s.originalLine;while(s&&s.originalLine===a){t.push({line:o.getArg(s,"generatedLine",null),column:o.getArg(s,"generatedColumn",null),lastColumn:o.getArg(s,"lastGeneratedColumn",null)});s=this._originalMappings[++u]}}else{var l=s.originalColumn;while(s&&s.originalLine===r&&s.originalColumn==l){t.push({line:o.getArg(s,"generatedLine",null),column:o.getArg(s,"generatedColumn",null),lastColumn:o.getArg(s,"lastGeneratedColumn",null)});s=this._originalMappings[++u]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var s=o.getArg(n,"names",[]);var a=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var f=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(a){a=o.normalize(a)}i=i.map(String).map(o.normalize).map(function(e){return a&&o.isAbsolute(a)&&o.isAbsolute(e)?o.relative(a,e):e});this._names=u.fromArray(s.map(String),true);this._sources=u.fromArray(i,true);this._absoluteSources=this._sources.toArray().map(function(e){return o.computeSourceURL(a,e,r)});this.sourceRoot=a;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=f}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){_.source=l+m[1];l+=m[1];_.originalLine=i+m[2];i=_.originalLine;_.originalLine+=1;_.originalColumn=u+m[3];u=_.originalColumn;if(m.length>4){_.name=c+m[4];c+=m[4]}}v.push(_);if(typeof _.originalLine==="number"){d.push(_)}}}a(v,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=v;a(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,u){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,u)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var u=o.getArg(t,"name",null);if(u!==null){u=this._names.at(u)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:u}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(e){return e==null})};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var u=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(u)){return 
this.sourcesContent[this._sources.indexOf(u)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new u;this._names=new u;var s={line:-1,column:0};this._sections=i.map(function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(215);var o=n(983);var i=n(837).I;var u=n(740).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new u;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping(function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)});e.sources.forEach(function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var u=e.sourceContentFor(t);if(u!=null){n.setSourceContent(t,u)}});return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var 
t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var u=this._sourceRoot;if(u!=null){t=o.relative(u,t)}var s=new i;var a=new i;this._mappings.unsortedForEach(function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(u!=null){r.source=o.relative(u,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!s.has(l)){s.add(l)}var c=r.name;if(c!=null&&!a.has(c)){a.add(c)}},this);this._sources=s;this._names=a;e.sources.forEach(function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(u!=null){r=o.relative(u,r)}this.setSourceContent(r,t)}},this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var u=0;var s=0;var a="";var l;var c;var f;var p;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){p=this._sources.indexOf(c.source);l+=t.encode(p-s);s=p;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){f=this._names.indexOf(c.name);l+=t.encode(f-u);u=f}}a+=l}return a};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map(function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null},this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},990:(e,r,n)=>{var t;var o=n(341).h;var i=n(983);var u=/(\r?\n)/;var s=10;var a="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[a]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new 
SourceNode;var o=e.split(u);var s=0;var a=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return s=0;r--){this.prepend(e[r])}}else if(e[a]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var u,s=0,a=i.length-1;a>=0;a--){u=i[a];if(u==="."){i.splice(a,1)}else if(u===".."){s++}else if(s>0){if(u===""){i.splice(a+1,s);s=0}else{i.splice(a,2);s--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return 
t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},596:(e,r,n)=>{n(341).h;r.SourceMapConsumer=n(327).SourceMapConsumer;n(990)},747:e=>{"use strict";e.exports=require("fs")},282:e=>{"use strict";e.exports=require("module")},622:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){if(r[n]){return r[n].exports}var t=r[n]={exports:{}};var o=true;try{e[n](t,t.exports,__webpack_require__);o=false}finally{if(o)delete r[n]}return t.exports}__webpack_require__.ab=__dirname+"/";return __webpack_require__(645)})(); \ No newline at end of file